Oct 03 15:27:59 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 03 15:27:59 crc restorecon[4726]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:27:59 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 15:28:00 crc restorecon[4726]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc 
restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 15:28:00 crc 
restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 
15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 15:28:00 crc 
restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 15:28:00 crc restorecon[4726]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
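[editor's note] The flag-deprecation warnings above all point at the same migration: per the linked kubelet-config-file documentation, these flags should move into the file passed via --config. A minimal sketch of the equivalent KubeletConfiguration stanza for the flags warned about so far; the field names are from the upstream kubelet.config.k8s.io/v1beta1 API, but the endpoint, taint, and eviction values here are placeholders, not values read from this node:

apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
# replaces --container-runtime-endpoint (placeholder socket path)
containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
# replaces --volume-plugin-dir (placeholder directory)
volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
# replaces --register-with-taints (placeholder taint)
registerWithTaints:
- key: node-role.kubernetes.io/master
  effect: NoSchedule
# replaces --minimum-container-ttl-duration, which the warning says to
# express as eviction thresholds instead (placeholder threshold)
evictionHard:
  memory.available: 100Mi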
Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 03 15:28:01 crc kubenswrapper[5081]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.531767 5081 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539324 5081 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539357 5081 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539377 5081 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539389 5081 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539399 5081 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539410 5081 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539478 5081 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539499 5081 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539508 5081 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539518 5081 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539527 5081 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539537 5081 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539545 5081 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539556 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539589 5081 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539599 5081 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539607 5081 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539617 5081 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539625 5081 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539642 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539651 5081 feature_gate.go:330] 
unrecognized feature gate: ClusterMonitoringConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539660 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539668 5081 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539681 5081 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539694 5081 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539706 5081 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539717 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539727 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539739 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539749 5081 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539760 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539782 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539794 5081 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539807 5081 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539818 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539830 5081 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.539842 5081 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540009 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540024 5081 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540035 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540046 5081 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540062 5081 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540073 5081 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540095 5081 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540106 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540116 5081 
feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540126 5081 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540137 5081 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540147 5081 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540157 5081 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540166 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540176 5081 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540186 5081 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540198 5081 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540208 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540217 5081 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540236 5081 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540250 5081 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540260 5081 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540270 5081 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540279 5081 feature_gate.go:330] unrecognized feature gate: Example Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540288 5081 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540298 5081 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540308 5081 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540318 5081 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540336 5081 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540349 5081 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
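[editor's note] The repeated feature_gate.go:330 warnings above and below appear to be OpenShift-level gate names (RouteAdvertisements, NewOLM, InsightsConfig, and so on) that the kubelet's embedded Kubernetes feature-gate registry does not recognize; the kubelet logs a warning for each and skips it. A quick way to see which names are involved, and how often each run repeats, is to tally the warnings from a saved copy of this journal output. This is a minimal sketch: the filename kubelet.log is an assumption about where the log was archived, not something the log itself states.

    import re
    from collections import Counter

    # Count each gate named in an "unrecognized feature gate: <Name>" warning.
    pattern = re.compile(r"unrecognized feature gate: (\S+)")
    counts = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for name in pattern.findall(line):
                counts[name] += 1
    for name, n in counts.most_common():
        print(f"{n:4d}  {name}")

Identical counts across all names would confirm that each warning pass walks the same gate list.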
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540371 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540383 5081 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540393 5081 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.540404 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541440 5081 flags.go:64] FLAG: --address="0.0.0.0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541736 5081 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541794 5081 flags.go:64] FLAG: --anonymous-auth="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541808 5081 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541820 5081 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541830 5081 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541842 5081 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541854 5081 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541864 5081 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541873 5081 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541883 5081 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541893 5081 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541902 5081 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541911 5081 flags.go:64] FLAG: --cgroup-root="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541920 5081 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541929 5081 flags.go:64] FLAG: --client-ca-file="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541939 5081 flags.go:64] FLAG: --cloud-config="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541947 5081 flags.go:64] FLAG: --cloud-provider="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541957 5081 flags.go:64] FLAG: --cluster-dns="[]" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541974 5081 flags.go:64] FLAG: --cluster-domain="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541983 5081 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.541992 5081 flags.go:64] FLAG: --config-dir="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542000 5081 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542011 5081 flags.go:64] FLAG: --container-log-max-files="5" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542023 5081 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 03 15:28:01 crc 
kubenswrapper[5081]: I1003 15:28:01.542034 5081 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542044 5081 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542054 5081 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542064 5081 flags.go:64] FLAG: --contention-profiling="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542082 5081 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542099 5081 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542110 5081 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542119 5081 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542132 5081 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542141 5081 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542150 5081 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542158 5081 flags.go:64] FLAG: --enable-load-reader="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542167 5081 flags.go:64] FLAG: --enable-server="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542176 5081 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542188 5081 flags.go:64] FLAG: --event-burst="100" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542197 5081 flags.go:64] FLAG: --event-qps="50" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542206 5081 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542216 5081 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542226 5081 flags.go:64] FLAG: --eviction-hard="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542237 5081 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542247 5081 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542257 5081 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542267 5081 flags.go:64] FLAG: --eviction-soft="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542275 5081 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542284 5081 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542293 5081 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542303 5081 flags.go:64] FLAG: --experimental-mounter-path="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542312 5081 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542321 5081 flags.go:64] FLAG: --fail-swap-on="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542330 5081 flags.go:64] FLAG: --feature-gates="" Oct 03 
15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542342 5081 flags.go:64] FLAG: --file-check-frequency="20s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542351 5081 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542360 5081 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542370 5081 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542379 5081 flags.go:64] FLAG: --healthz-port="10248" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542389 5081 flags.go:64] FLAG: --help="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542399 5081 flags.go:64] FLAG: --hostname-override="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542408 5081 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542417 5081 flags.go:64] FLAG: --http-check-frequency="20s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542426 5081 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542435 5081 flags.go:64] FLAG: --image-credential-provider-config="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542444 5081 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542453 5081 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542462 5081 flags.go:64] FLAG: --image-service-endpoint="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542471 5081 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542480 5081 flags.go:64] FLAG: --kube-api-burst="100" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542489 5081 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542499 5081 flags.go:64] FLAG: --kube-api-qps="50" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542508 5081 flags.go:64] FLAG: --kube-reserved="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542516 5081 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542525 5081 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542534 5081 flags.go:64] FLAG: --kubelet-cgroups="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542543 5081 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542551 5081 flags.go:64] FLAG: --lock-file="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542599 5081 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542609 5081 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542618 5081 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542632 5081 flags.go:64] FLAG: --log-json-split-stream="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542656 5081 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542666 5081 flags.go:64] FLAG: --log-text-split-stream="false" Oct 03 15:28:01 crc 
kubenswrapper[5081]: I1003 15:28:01.542677 5081 flags.go:64] FLAG: --logging-format="text" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542686 5081 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542696 5081 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542704 5081 flags.go:64] FLAG: --manifest-url="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542713 5081 flags.go:64] FLAG: --manifest-url-header="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542734 5081 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542743 5081 flags.go:64] FLAG: --max-open-files="1000000" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542754 5081 flags.go:64] FLAG: --max-pods="110" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542763 5081 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542772 5081 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542781 5081 flags.go:64] FLAG: --memory-manager-policy="None" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542791 5081 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542800 5081 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542809 5081 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542818 5081 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542846 5081 flags.go:64] FLAG: --node-status-max-images="50" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542855 5081 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542864 5081 flags.go:64] FLAG: --oom-score-adj="-999" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542874 5081 flags.go:64] FLAG: --pod-cidr="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542883 5081 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542900 5081 flags.go:64] FLAG: --pod-manifest-path="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542909 5081 flags.go:64] FLAG: --pod-max-pids="-1" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542919 5081 flags.go:64] FLAG: --pods-per-core="0" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542927 5081 flags.go:64] FLAG: --port="10250" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542936 5081 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542945 5081 flags.go:64] FLAG: --provider-id="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542955 5081 flags.go:64] FLAG: --qos-reserved="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542963 5081 flags.go:64] FLAG: --read-only-port="10255" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542972 5081 flags.go:64] FLAG: --register-node="true" Oct 03 15:28:01 crc 
kubenswrapper[5081]: I1003 15:28:01.542981 5081 flags.go:64] FLAG: --register-schedulable="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.542990 5081 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543006 5081 flags.go:64] FLAG: --registry-burst="10" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543016 5081 flags.go:64] FLAG: --registry-qps="5" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543024 5081 flags.go:64] FLAG: --reserved-cpus="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543036 5081 flags.go:64] FLAG: --reserved-memory="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543047 5081 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543056 5081 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543067 5081 flags.go:64] FLAG: --rotate-certificates="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543076 5081 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543085 5081 flags.go:64] FLAG: --runonce="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543094 5081 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543103 5081 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543113 5081 flags.go:64] FLAG: --seccomp-default="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543122 5081 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543131 5081 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543140 5081 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543149 5081 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543167 5081 flags.go:64] FLAG: --storage-driver-password="root" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543176 5081 flags.go:64] FLAG: --storage-driver-secure="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543185 5081 flags.go:64] FLAG: --storage-driver-table="stats" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543194 5081 flags.go:64] FLAG: --storage-driver-user="root" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543203 5081 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543212 5081 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543222 5081 flags.go:64] FLAG: --system-cgroups="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543231 5081 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543246 5081 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543255 5081 flags.go:64] FLAG: --tls-cert-file="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543264 5081 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543275 5081 flags.go:64] FLAG: --tls-min-version="" Oct 03 15:28:01 
crc kubenswrapper[5081]: I1003 15:28:01.543284 5081 flags.go:64] FLAG: --tls-private-key-file="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543293 5081 flags.go:64] FLAG: --topology-manager-policy="none" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543302 5081 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543311 5081 flags.go:64] FLAG: --topology-manager-scope="container" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543320 5081 flags.go:64] FLAG: --v="2" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543341 5081 flags.go:64] FLAG: --version="false" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543353 5081 flags.go:64] FLAG: --vmodule="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543364 5081 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.543373 5081 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543656 5081 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543668 5081 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543678 5081 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543686 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543694 5081 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543702 5081 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543710 5081 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543717 5081 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543726 5081 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543734 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543742 5081 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543753 5081 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543760 5081 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543768 5081 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543775 5081 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543783 5081 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543791 5081 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543798 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 
15:28:01.543806 5081 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543815 5081 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543823 5081 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543831 5081 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543838 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543846 5081 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543854 5081 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543861 5081 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543870 5081 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543878 5081 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543885 5081 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543896 5081 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543906 5081 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543915 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543924 5081 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543933 5081 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543943 5081 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543953 5081 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543963 5081 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
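[editor's note] The flags.go:64 records above dump every kubelet flag together with its effective value (--container-runtime-endpoint, --node-ip, --system-reserved, and the rest). For post-mortem checks it can be convenient to rebuild that dump into a dictionary. The sketch below makes the same assumption as before, that this journal output was saved as kubelet.log; the two lookups at the end are illustrative, with expected values taken from the dump itself.

    import re

    # Each record looks like: flags.go:64] FLAG: --node-ip="192.168.126.11"
    flag_re = re.compile(r'flags\.go:\d+\] FLAG: --([\w.-]+)="(.*?)"')
    flags = {}
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for name, value in flag_re.findall(line):
                flags[name] = value

    print(flags.get("container-runtime-endpoint"))  # expected: /var/run/crio/crio.sock
    print(flags.get("node-ip"))                     # expected: 192.168.126.11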
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543973 5081 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543982 5081 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543990 5081 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.543998 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544006 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544014 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544024 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544032 5081 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544039 5081 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544047 5081 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544055 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544063 5081 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544070 5081 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544078 5081 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544085 5081 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544095 5081 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544103 5081 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544111 5081 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544120 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544127 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544148 5081 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544155 5081 feature_gate.go:330] unrecognized feature gate: Example Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544163 5081 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544171 5081 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544178 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544186 5081 feature_gate.go:330] unrecognized feature gate: GatewayAPI 
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544193 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544201 5081 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544211 5081 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544221 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544230 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544240 5081 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544248 5081 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.544256 5081 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.544280 5081 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.556661 5081 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.556709 5081 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556831 5081 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556847 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556859 5081 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556872 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556882 5081 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556893 5081 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556904 5081 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556914 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556924 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556934 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556944 5081 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 15:28:01 crc 
kubenswrapper[5081]: W1003 15:28:01.556953 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556962 5081 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556974 5081 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556983 5081 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.556993 5081 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557003 5081 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557014 5081 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557026 5081 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557037 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557047 5081 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557062 5081 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557073 5081 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557084 5081 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557093 5081 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557107 5081 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557115 5081 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557123 5081 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557133 5081 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557143 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557151 5081 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557160 5081 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557168 5081 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557176 5081 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557189 5081 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557199 5081 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557209 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557219 5081 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557267 5081 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557279 5081 feature_gate.go:330] unrecognized feature gate: Example Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557289 5081 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557300 5081 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557310 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557320 5081 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557330 5081 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557340 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557351 5081 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557361 5081 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557374 5081 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557387 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557396 5081 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557407 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557419 5081 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557430 5081 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557441 5081 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557451 5081 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557461 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557471 5081 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557480 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557490 5081 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557500 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557508 5081 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557516 5081 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557523 5081 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557531 5081 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557539 5081 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557546 5081 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557554 5081 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557592 5081 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557601 5081 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557611 5081 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.557624 5081 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557890 5081 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557906 5081 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557918 5081 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557930 5081 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557941 5081 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557951 5081 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557960 5081 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557970 5081 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557981 5081 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.557991 5081 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558000 5081 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558009 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558018 5081 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558029 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558039 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558053 5081 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558064 5081 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558077 5081 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558090 5081 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558100 5081 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558110 5081 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558120 5081 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558129 5081 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558139 5081 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558149 5081 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558159 5081 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558169 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558178 5081 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558188 5081 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558199 5081 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558208 5081 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558219 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558229 5081 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558239 5081 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558253 5081 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558263 5081 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558273 5081 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558283 5081 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558293 5081 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558303 5081 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558314 5081 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558324 5081 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558334 5081 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558345 5081 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 15:28:01 crc 
kubenswrapper[5081]: W1003 15:28:01.558356 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558367 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558377 5081 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558386 5081 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558396 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558405 5081 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558415 5081 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558425 5081 feature_gate.go:330] unrecognized feature gate: Example Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558435 5081 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558444 5081 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558454 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558464 5081 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558473 5081 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558487 5081 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558499 5081 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558510 5081 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558523 5081 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558533 5081 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558544 5081 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558554 5081 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558602 5081 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558613 5081 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558623 5081 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558633 5081 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558643 5081 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558654 5081 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.558667 5081 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.558681 5081 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.560440 5081 server.go:940] "Client rotation is on, will bootstrap in background" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.567902 5081 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.568108 5081 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
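[editor's note] Each pass over the gate list ends with an applied-state summary in Go map syntax, feature gates: {map[CloudDualStackNodeIPs:true ... VolumeAttributesClass:false]}, and all three summaries in this log agree. The helper below turns one such line into a Python dict for comparison across restarts; parse_gate_summary is my name for it, and it assumes the whole summary sits on a single journal line, as it does here.

    import re

    def parse_gate_summary(line: str) -> dict:
        # Matches the Go-syntax map the kubelet logs, e.g. {map[KMSv1:true NodeSwap:false]}
        m = re.search(r"feature gates: \{map\[(.*?)\]\}", line)
        if not m:
            return {}
        out = {}
        for item in m.group(1).split():
            name, _, value = item.partition(":")
            out[name] = (value == "true")
        return out

    example = ("feature gates: {map[CloudDualStackNodeIPs:true "
               "DisableKubeletCloudCredentialProviders:true KMSv1:true NodeSwap:false "
               "ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}")
    print(parse_gate_summary(example))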
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.570721 5081 server.go:997] "Starting client certificate rotation"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.570773 5081 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.571041 5081 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-08 08:25:00.321701043 +0000 UTC
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.571179 5081 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 856h56m58.750526897s for next certificate rotation
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.614521 5081 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.618533 5081 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.641246 5081 log.go:25] "Validated CRI v1 runtime API"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.684443 5081 log.go:25] "Validated CRI v1 image API"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.686498 5081 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.694444 5081 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-03-15-23-05-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.694501 5081 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.719961 5081 manager.go:217] Machine: {Timestamp:2025-10-03 15:28:01.716375505 +0000 UTC m=+0.681932138 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:57f4f478-2d02-4730-ae6e-811ee98398a9 BootID:7d41c104-ec72-4a46-8cc3-ea2023941cda Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:81:13:cb Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:81:13:cb Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:db:b5:fa Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:84:82:71 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f3:a3:ab Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:9a:a5:b8 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:8b:0c:59 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:1e:cf:b4:2c:82 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ce:d1:9a:7e:bc:e0 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.720458 5081 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.720712 5081 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.721091 5081 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.721332 5081 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.721383 5081 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.721641 5081 topology_manager.go:138] "Creating topology manager with none policy"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.721656 5081 container_manager_linux.go:303] "Creating device plugin manager"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.722426 5081 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.722460 5081 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.723422 5081 state_mem.go:36] "Initialized new in-memory state store"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.723874 5081 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.732966 5081 kubelet.go:418] "Attempting to sync node with API server"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.732996 5081 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.733026 5081 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.733042 5081 kubelet.go:324] "Adding apiserver pod source"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.733056 5081 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.738953 5081 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.740210 5081 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.744792 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.744881 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.744937 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.744967 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.746516 5081 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748409 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748446 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748460 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748474 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748493 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748504 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748513 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748527 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748539 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748552 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748593 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.748607 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.750788 5081 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.751381 5081 server.go:1280] "Started kubelet"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.752726 5081 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.752730 5081 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.752969 5081 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.753188 5081 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 03 15:28:01 crc systemd[1]: Started Kubernetes Kubelet.
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.753658 5081 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.753868 5081 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.753881 5081 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.753888 5081 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 01:02:13.583147868 +0000 UTC
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.756977 5081 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1617h34m11.826179196s for next certificate rotation
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.757280 5081 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.757473 5081 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.753887 5081 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.758897 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.759017 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.760706 5081 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.760753 5081 factory.go:55] Registering systemd factory
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.760774 5081 factory.go:221] Registration of the systemd container factory successfully
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.761589 5081 factory.go:153] Registering CRI-O factory
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.761655 5081 factory.go:221] Registration of the crio container factory successfully
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.761594 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.59:6443: connect: connection refused" interval="200ms"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.761701 5081 factory.go:103] Registering Raw factory
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.761773 5081 manager.go:1196] Started watching for new ooms in manager
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.762798 5081 manager.go:319] Starting recovery of all containers
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.771014 5081 server.go:460] "Adding debug handlers to kubelet server"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.776736 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.776798 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.776814 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.776827 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.776841 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.778125 5081 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.59:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186b04bb4336731a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 15:28:01.751347994 +0000 UTC m=+0.716904617,LastTimestamp:2025-10-03 15:28:01.751347994 +0000 UTC m=+0.716904617,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.780949 5081 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781038 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781078 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781145 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781189 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781222 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781249 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781276 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781307 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781339 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781371 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781399 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781425 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781453 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781495 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781522 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781549 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781610 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781639 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781665 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781694 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781722 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781756 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781783 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781816 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781842 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781869 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781897 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781928 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781954 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.781982 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782008 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782049 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782077 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782104 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782134 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782193 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782221 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782246 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782276 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782308 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782334 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782363 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782390 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782418 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782446 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782470 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782495 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782535 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782618 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782657 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782687 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782716 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782742 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782765 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782791 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782822 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782847 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782871 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782898 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782923 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782950 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.782977 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783007 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783045 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783076 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783102 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783132 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783165 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783193 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783221 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783249 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783278 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783305 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783331 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783361 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783394 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783421 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783448 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783476 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783504 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783531 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783594 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783629 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783662 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783689 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783717 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783745 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783778 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783808 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783835 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783863 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783891 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783917 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783945 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.783971 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784004 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784036 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784069 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784103 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784145 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784177 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784207 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784243 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784278 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784309 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784340 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784373 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784402 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784433 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784462 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784491 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784517 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784545 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784610 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784641 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784699 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784728 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784754 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784788 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784818 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784846 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784873 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784899 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784931 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784959 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.784987 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785019 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785048 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785076 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785105 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785160 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785193 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785225 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785254 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312"
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785280 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785311 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785342 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785371 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785400 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785429 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785457 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785483 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785509 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785538 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.785979 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786022 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786052 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786086 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786114 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786143 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786171 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786290 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786324 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786353 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786382 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786410 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786438 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786468 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786495 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786524 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786551 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786620 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786648 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786677 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786704 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786730 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786756 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786787 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786815 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786841 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786871 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786900 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786929 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786958 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.786989 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787016 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787044 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787073 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787100 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787126 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787152 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787179 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787206 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787261 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787290 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787319 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787346 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787375 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787403 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787430 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787459 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787487 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787514 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787542 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787606 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787643 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787670 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787698 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787726 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787757 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787785 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787813 5081 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787839 5081 reconstruct.go:97] "Volume reconstruction finished" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.787855 5081 reconciler.go:26] "Reconciler: start to sync state" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.801888 5081 manager.go:324] Recovery completed Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.823085 5081 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.824482 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.826257 5081 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.826329 5081 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.826368 5081 kubelet.go:2335] "Starting kubelet main sync loop" Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.826435 5081 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827095 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827181 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827920 5081 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827940 5081 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.827962 5081 state_mem.go:36] "Initialized new in-memory state store" Oct 03 15:28:01 crc kubenswrapper[5081]: W1003 15:28:01.828449 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.828554 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.847421 5081 policy_none.go:49] "None policy: Start" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.848618 5081 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.848646 5081 state_mem.go:35] "Initializing new in-memory state store" Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.854062 5081 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.914506 5081 manager.go:334] "Starting Device Plugin manager" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.914593 5081 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.914612 5081 server.go:79] "Starting device plugin registration server" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.915176 5081 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.915202 5081 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.915387 5081 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.915525 5081 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.915533 5081 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.924863 5081 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.927184 5081 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.927273 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928222 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928261 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928447 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928970 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.928970 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.929114 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930497 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930547 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930675 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930827 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930870 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930928 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.930941 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931540 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931599 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931623 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931816 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.931947 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.932004 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.932960 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933002 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933019 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933215 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933270 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933298 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933309 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933337 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.933370 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934407 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934434 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934445 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934534 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934549 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934768 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.934799 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.935334 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.935362 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.935376 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:01 crc kubenswrapper[5081]: E1003 15:28:01.963395 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.59:6443: connect: connection refused" interval="400ms"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.991892 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.991952 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.991980 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992004 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992024 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992046 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992069 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992142 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992301 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992404 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992447 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992479 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992515 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992646 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:01 crc kubenswrapper[5081]: I1003 15:28:01.992690 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.015621 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.017072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.017120 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.017131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.017159 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.017762 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.59:6443: connect: connection refused" node="crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094148 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094247 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094282 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094313 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094342 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094371 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094401 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094428 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094458 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094485 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094515 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094543 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094604 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094631 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094660 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094911 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094963 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094973 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094912 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094987 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095014 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.094905 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095057 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095037 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095067 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095051 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095077 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095159 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.095178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.218134 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.219855 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.219909 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.219923 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.219970 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.220611 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.59:6443: connect: connection refused" node="crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.269853 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.291649 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.299526 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.314903 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.326825 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-f28a78cb28aa624d7a9890f533bf4609945f60a6a8742674418fbdb8fbb56117 WatchSource:0}: Error finding container f28a78cb28aa624d7a9890f533bf4609945f60a6a8742674418fbdb8fbb56117: Status 404 returned error can't find the container with id f28a78cb28aa624d7a9890f533bf4609945f60a6a8742674418fbdb8fbb56117 Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.327398 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9326d6316e00552fce7c2a23f012df987e91fe0ca5f7a7ce7b1fa0eb27faf547 WatchSource:0}: Error finding container 9326d6316e00552fce7c2a23f012df987e91fe0ca5f7a7ce7b1fa0eb27faf547: Status 404 returned error can't find the container with id 9326d6316e00552fce7c2a23f012df987e91fe0ca5f7a7ce7b1fa0eb27faf547 Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.329059 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-838aaa095a8ef9df89c15294743084851fe96acc632f337ce45e51bab2639c8b WatchSource:0}: Error finding container 838aaa095a8ef9df89c15294743084851fe96acc632f337ce45e51bab2639c8b: Status 404 returned error can't find the container with id 838aaa095a8ef9df89c15294743084851fe96acc632f337ce45e51bab2639c8b Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.334674 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-8548d9c988633bf38782c186bb155f6438dd9eb3770171348202ff8fdbfddb80 WatchSource:0}: Error finding container 8548d9c988633bf38782c186bb155f6438dd9eb3770171348202ff8fdbfddb80: Status 404 returned error can't find the container with id 8548d9c988633bf38782c186bb155f6438dd9eb3770171348202ff8fdbfddb80 Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.364765 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.59:6443: connect: connection refused" interval="800ms" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.621489 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.622732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.622770 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.622784 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.622813 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.623507 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.102.83.59:6443: connect: connection refused" node="crc" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.758621 5081 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.764234 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.764365 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError" Oct 03 15:28:02 crc kubenswrapper[5081]: W1003 15:28:02.814192 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:02 crc kubenswrapper[5081]: E1003 15:28:02.814323 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError" Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.831581 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8548d9c988633bf38782c186bb155f6438dd9eb3770171348202ff8fdbfddb80"} Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.833489 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"838aaa095a8ef9df89c15294743084851fe96acc632f337ce45e51bab2639c8b"} Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.834713 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9326d6316e00552fce7c2a23f012df987e91fe0ca5f7a7ce7b1fa0eb27faf547"} Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.836062 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f28a78cb28aa624d7a9890f533bf4609945f60a6a8742674418fbdb8fbb56117"} Oct 03 15:28:02 crc kubenswrapper[5081]: I1003 15:28:02.837139 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"aa5476831b11b0b5a051ed6b3af1535c692d288bb88986d1fe6562da12a5fbc5"} Oct 03 15:28:03 crc kubenswrapper[5081]: W1003 15:28:03.045169 5081 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:03 crc kubenswrapper[5081]: E1003 15:28:03.045702 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError" Oct 03 15:28:03 crc kubenswrapper[5081]: W1003 15:28:03.160240 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:03 crc kubenswrapper[5081]: E1003 15:28:03.160325 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError" Oct 03 15:28:03 crc kubenswrapper[5081]: E1003 15:28:03.166112 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.59:6443: connect: connection refused" interval="1.6s" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.423797 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.426224 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.426279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.426297 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.426342 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 15:28:03 crc kubenswrapper[5081]: E1003 15:28:03.427156 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.59:6443: connect: connection refused" node="crc" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.759544 5081 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.841904 5081 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="1ae7ad5274d47915e62eff4e7f037a517875ec91ae493215047d90f7b2500f9a" exitCode=0 Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.842014 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 
15:28:03.842021 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"1ae7ad5274d47915e62eff4e7f037a517875ec91ae493215047d90f7b2500f9a"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843288 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843303 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843372 5081 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee" exitCode=0 Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.843486 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.844304 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.844329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.844340 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.846425 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.846458 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.846470 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.846481 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0"} Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.846573 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.847430 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.847452 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.847462 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848271 5081 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e" exitCode=0
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848333 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e"}
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848344 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848920 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848936 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.848945 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.850786 5081 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="dc78d99ce388335f3a1c540c102f9df613db55cee3f17b2b2e455e4ea7210ec5" exitCode=0
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.850816 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"dc78d99ce388335f3a1c540c102f9df613db55cee3f17b2b2e455e4ea7210ec5"}
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.850952 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.851748 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.851786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.851805 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.852810 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.854614 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.854646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:03 crc kubenswrapper[5081]: I1003 15:28:03.854658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.759044 5081 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:04 crc kubenswrapper[5081]: E1003 15:28:04.767842 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.59:6443: connect: connection refused" interval="3.2s"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.857903 5081 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="13f5c1fe5652bf0e090da9ee9464c625a4a22fa3d87494da512838236ea3328f" exitCode=0
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.858216 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.858193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"13f5c1fe5652bf0e090da9ee9464c625a4a22fa3d87494da512838236ea3328f"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.859467 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.859499 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.859515 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.861621 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4aa99747a6aa279c3920fb88e984a1ab7b98faad74f175c7f7d3e14ea0cbafe8"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.861733 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.863523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.863554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.863595 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.868034 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.868081 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.868099 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.868190 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.869866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.869908 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.869923 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.874742 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.874802 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.874806 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.880687 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.880723 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e"}
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.885368 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.885513 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:04 crc kubenswrapper[5081]: I1003 15:28:04.885662 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.027262 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.028965 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.029014 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.029027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.029064 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 03 15:28:05 crc kubenswrapper[5081]: E1003 15:28:05.029758 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.59:6443: connect: connection refused" node="crc"
Oct 03 15:28:05 crc kubenswrapper[5081]: W1003 15:28:05.310461 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:05 crc kubenswrapper[5081]: E1003 15:28:05.310631 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:05 crc kubenswrapper[5081]: W1003 15:28:05.319260 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:05 crc kubenswrapper[5081]: E1003 15:28:05.319336 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:05 crc kubenswrapper[5081]: W1003 15:28:05.435338 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.59:6443: connect: connection refused
Oct 03 15:28:05 crc kubenswrapper[5081]: E1003 15:28:05.435475 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.59:6443: connect: connection refused" logger="UnhandledError"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.879291 5081 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4778f9af3e03ba07afba3960d2d9863cacfe5056336398629db7b3db8afc018f" exitCode=0
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.879380 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4778f9af3e03ba07afba3960d2d9863cacfe5056336398629db7b3db8afc018f"}
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.879585 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.881028 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.881055 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.881073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.884673 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.884716 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.885198 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.885460 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161"}
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.885529 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.886333 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.886363 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.886376 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.887034 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.887073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.887081 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.894789 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.894815 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:05 crc kubenswrapper[5081]: I1003 15:28:05.894825 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.453428 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.661707 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891213 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7a197e1369049c06f336f7b5634f3fa1727ee9a82ef715badae895b328280f07"}
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b0d9dd84760cfcf495de277c10b93aef2452ef74d5189edb9fa369d6a8fdc344"}
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891307 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"04663e81e460a391cfabb132f40d324d8254003642d80a85ce77611b0bc80f5b"}
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891284 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891328 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bdfc0a2b963a34fc19ce4f9ca7afcfb59c14829ec82be6e3ad95d318f6358975"}
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.891400 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.892978 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.893025 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:06 crc kubenswrapper[5081]: I1003 15:28:06.893043 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.877895 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.878123 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.879972 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.880040 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.880058 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.901199 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.901300 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.902449 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.902863 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8cead89f21f7c6c41b9ef4e0ffb723c79ceff83cd13816af76d0275dbbfb13e9"}
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.903385 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.903426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.903443 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.904552 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.904622 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.904638 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.919427 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.919700 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.921627 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.921669 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:07 crc kubenswrapper[5081]: I1003 15:28:07.921685 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.230945 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.232663 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.232747 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.232778 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.232837 5081 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.904373 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.905327 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.905364 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:08 crc kubenswrapper[5081]: I1003 15:28:08.905375 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.411047 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.411278 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.412353 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.412394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.412408 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.787346 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.787622 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.788929 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.788977 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:09 crc kubenswrapper[5081]: I1003 15:28:09.788994 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.055277 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.055613 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.057215 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.057280 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.057302 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.063274 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.878066 5081 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.878203 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.910989 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.912273 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.912314 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:10 crc kubenswrapper[5081]: I1003 15:28:10.912328 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:11 crc kubenswrapper[5081]: E1003 15:28:11.924960 5081 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 03 15:28:12 crc kubenswrapper[5081]: I1003 15:28:12.612338 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Oct 03 15:28:12 crc kubenswrapper[5081]: I1003 15:28:12.612659 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:12 crc kubenswrapper[5081]: I1003 15:28:12.614208 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:12 crc kubenswrapper[5081]: I1003 15:28:12.614270 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:12 crc kubenswrapper[5081]: I1003 15:28:12.614283 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.608956 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.609167 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.610800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.610842 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.610856 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.615137 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.919013 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.920657 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.920705 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:13 crc kubenswrapper[5081]: I1003 15:28:13.920717 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:15 crc kubenswrapper[5081]: I1003 15:28:15.760223 5081 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Oct 03 15:28:16 crc kubenswrapper[5081]: W1003 15:28:16.060054 5081 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.060194 5081 trace.go:236] Trace[1896060625]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 15:28:06.058) (total time: 10002ms):
Oct 03 15:28:16 crc kubenswrapper[5081]: Trace[1896060625]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:28:16.060)
Oct 03 15:28:16 crc kubenswrapper[5081]: Trace[1896060625]: [10.002026651s] [10.002026651s] END
Oct 03 15:28:16 crc kubenswrapper[5081]: E1003 15:28:16.060223 5081 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 03 15:28:16 crc kubenswrapper[5081]: E1003 15:28:16.208506 5081 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.186b04bb4336731a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 15:28:01.751347994 +0000 UTC m=+0.716904617,LastTimestamp:2025-10-03 15:28:01.751347994 +0000 UTC m=+0.716904617,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.662782 5081 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.662896 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.992200 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.992408 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.993536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.993594 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:16 crc kubenswrapper[5081]: I1003 15:28:16.993609 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.063489 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.242344 5081 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.242431 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.637481 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.931895 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.933174 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.933225 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:17 crc kubenswrapper[5081]: I1003 15:28:17.933236 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:18 crc kubenswrapper[5081]: I1003 15:28:18.934863 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:18 crc kubenswrapper[5081]: I1003 15:28:18.936135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:18 crc kubenswrapper[5081]: I1003 15:28:18.936182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:18 crc kubenswrapper[5081]: I1003 15:28:18.936198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:20 crc kubenswrapper[5081]: I1003 15:28:20.878526 5081 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 15:28:20 crc kubenswrapper[5081]: I1003 15:28:20.878688 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.669545 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.669893 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.671681 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.671762 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.671790 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.675072 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 03 15:28:21 crc kubenswrapper[5081]: E1003 15:28:21.925109 5081 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.943831 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.943890 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.944741 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.944854 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:21 crc kubenswrapper[5081]: I1003 15:28:21.944966 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.233112 5081 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.237374 5081 trace.go:236] Trace[741619806]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 15:28:09.245) (total time: 12992ms):
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[741619806]: ---"Objects listed" error: 12992ms (15:28:22.237)
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[741619806]: [12.992151918s] [12.992151918s] END
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.237409 5081 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.238605 5081 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.239225 5081 trace.go:236] Trace[1600572571]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 15:28:11.490) (total time: 10748ms):
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[1600572571]: ---"Objects listed" error: 10748ms (15:28:22.239)
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[1600572571]: [10.74899371s] [10.74899371s] END
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.239262 5081 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.239306 5081 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.239376 5081 trace.go:236] Trace[1796338552]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 15:28:10.116) (total time: 12122ms):
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[1796338552]: ---"Objects listed" error: 12122ms (15:28:22.239)
Oct 03 15:28:22 crc kubenswrapper[5081]: Trace[1796338552]: [12.122485796s] [12.122485796s] END
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.239394 5081 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.269912 5081 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52028->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.269987 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52028->192.168.126.11:17697: read: connection reset by peer"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.270019 5081 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52030->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.270114 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52030->192.168.126.11:17697: read: connection reset by peer"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.270600 5081 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.270677 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.362170 5081 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.746789 5081 apiserver.go:52] "Watching apiserver"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.751586 5081 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.752027 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.752805 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.752894 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.752972 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.753265 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.753297 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.753347 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.753374 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.753707 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.754089 5081 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.755482 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758286 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758313 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758467 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758605 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758636 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758669 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758697 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.758997 5081 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.759297 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.801706 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.816786 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.831534 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843739 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843826 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843864 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843897 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843931 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843965 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.843997 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844028 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844060 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844090 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844126 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844190 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844222 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844257 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844282 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844306 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844386 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844411 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844433 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844460 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844615 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844640 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844661 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844681 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844700 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844734 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844779 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844806 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844830 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844866 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844895 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844928 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844959 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.844987 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845010 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845034 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845064 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845092 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845115 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845136 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845165 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845197 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845196 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845251 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845219 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.845467 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:23.345437358 +0000 UTC m=+22.310993991 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845508 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845521 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845461 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845617 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845760 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845830 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845899 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845957 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.845982 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846013 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846077 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846116 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846132 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846220 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846248 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846258 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846304 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846363 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846375 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846423 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846480 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846535 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846635 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846666 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846689 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846789 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846747 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846862 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846908 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846915 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846969 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.846983 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847022 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847059 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847074 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847130 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847182 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847132 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847237 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847288 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847336 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847382 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847432 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847485 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847539 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847618 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847667 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847754 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847806 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847854 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847901 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847955 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848007 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848064 5081 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848124 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848215 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848304 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848353 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848409 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848463 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848819 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848883 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848934 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848985 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849028 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849076 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849123 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849169 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849224 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849275 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849322 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849377 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849426 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849477 5081 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849528 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850390 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850447 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850498 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850551 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850651 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850629 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850707 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851101 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851140 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851176 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851212 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 
15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851252 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851303 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851343 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851381 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851415 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851485 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851519 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851556 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851630 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851664 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851703 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851741 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851776 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851814 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851851 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851887 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851920 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851955 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851996 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852032 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852067 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852100 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852153 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852192 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852227 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852269 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852334 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852385 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852420 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852457 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852496 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852530 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852595 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852631 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852666 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852708 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852745 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852782 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852817 5081 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852853 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852889 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852926 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852964 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852999 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853092 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853129 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853164 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 15:28:22 crc 
kubenswrapper[5081]: I1003 15:28:22.853202 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853288 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853333 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853367 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853400 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853434 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853476 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853517 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853553 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853625 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853661 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853697 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853731 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853767 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853800 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853836 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853874 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853909 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853946 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853980 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854057 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854092 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854126 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854162 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854199 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854235 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854284 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854333 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854368 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854440 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854486 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854616 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854659 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854701 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854747 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854787 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854823 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854866 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854906 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854948 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854982 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855022 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855108 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855132 5081 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855156 5081 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855176 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855196 5081 reconciler_common.go:293] "Volume detached for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855220 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855249 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855276 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855303 5081 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855328 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855351 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855376 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855404 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855430 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855455 5081 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855480 5081 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847374 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847634 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.847683 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848095 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848337 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.848521 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849136 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849246 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849422 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849480 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849667 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849781 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.849817 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850179 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.850122 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851032 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851183 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851392 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874275 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874340 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.874648 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.874753 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:23.374725883 +0000 UTC m=+22.340282506 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874751 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874778 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874941 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.874952 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851672 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875523 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875546 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875604 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851864 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852353 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852232 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852529 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852551 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852620 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852648 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.875727 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852683 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875747 5081 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.852994 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875722 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853039 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853075 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853352 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.853878 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854093 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854136 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854266 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854712 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854742 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854733 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.854871 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855047 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855281 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855596 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855642 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.855687 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856206 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856242 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856248 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856402 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856538 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856870 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856998 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.856803 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.857197 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.857198 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.858017 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.858227 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.858281 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.858362 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.860019 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.860229 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.860393 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876937 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.860568 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.860511 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876962 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.861613 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.861873 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.877006 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.862131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.862472 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.863005 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.863401 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.865821 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.866166 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.866543 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.866620 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.866690 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.866868 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.867277 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.867484 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.867796 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.867892 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.868335 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.868422 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.868471 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.868501 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.868718 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869200 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869482 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869591 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869587 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869750 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.869534 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.870894 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.871005 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.871087 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.871155 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.871460 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.871996 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872186 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872215 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872518 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872631 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872745 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872799 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872813 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872829 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872965 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.872996 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.873007 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.873118 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875083 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875146 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.875817 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:23.375793185 +0000 UTC m=+22.341349828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875905 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.875959 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876169 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.851577 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876338 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876417 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876689 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.861655 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.877448 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.877620 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.877746 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.878025 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.878041 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.878369 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.876502 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.880211 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.880493 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.880660 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.881052 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.881345 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.881242 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.881363 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.881641 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.882395 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.883091 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.883407 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.883779 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.884186 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.884513 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.887015 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.887454 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.887674 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.888282 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.893169 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.893216 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.893238 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.893322 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:23.393295946 +0000 UTC m=+22.358852599 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.893738 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.893930 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.894100 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.894126 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.894168 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.894630 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.895357 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.898417 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.898472 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.898496 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:22 crc kubenswrapper[5081]: E1003 15:28:22.898612 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:23.398551359 +0000 UTC m=+22.364108222 (durationBeforeRetry 500ms). 
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.898928 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.899380 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.899542 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.899733 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.900028 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.901872 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.902319 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.903390 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.905442 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.905479 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.907206 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.913633 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.915023 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.915156 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.919030 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.919032 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.919132 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.923312 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.926885 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.927596 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.930634 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.936897 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.942774 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.946420 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.947646 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.953370 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.954983 5081 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161" exitCode=255 Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.955020 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161"} Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956125 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956165 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956235 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956250 5081 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956261 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956272 5081 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956283 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956292 5081 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956301 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956309 5081 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956318 5081 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956326 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956334 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956344 5081 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956353 5081 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956362 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956369 5081 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.956377 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.957378 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.957518 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.970804 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975414 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975452 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975463 5081 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975473 5081 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975481 5081 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975491 5081 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975500 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975510 5081 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975521 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975531 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975540 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975549 5081 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975576 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975586 5081 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975594 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975602 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975611 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975621 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975631 5081 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975639 5081 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975648 5081 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975656 5081 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975664 5081 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975672 5081 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975680 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975688 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975697 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975705 5081 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975727 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975741 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975757 5081 reconciler_common.go:293] "Volume detached for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975768 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975780 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975793 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975802 5081 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975810 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975819 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975828 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975837 5081 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975848 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975861 5081 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975871 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975881 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975892 5081 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975900 5081 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975909 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975917 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975924 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975934 5081 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975945 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975957 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975968 5081 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975978 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.975988 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976000 5081 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976010 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976017 5081 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976027 5081 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976039 5081 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976050 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976061 5081 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976073 5081 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976083 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976124 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976135 5081 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976146 5081 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976156 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976169 5081 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976180 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976192 5081 reconciler_common.go:293] "Volume detached for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976203 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976213 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976224 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976236 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976246 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976257 5081 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976267 5081 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976279 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976291 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976303 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976316 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976326 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976337 5081 reconciler_common.go:293] "Volume 
detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976347 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976358 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976369 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976381 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976392 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976402 5081 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976413 5081 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976424 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976434 5081 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976445 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976456 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976467 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976478 5081 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976489 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976499 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976511 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976523 5081 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976534 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976545 5081 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976572 5081 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976584 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976596 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976608 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976618 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976629 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976640 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976652 5081 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976662 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976673 5081 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976684 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976695 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976706 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976717 5081 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976728 5081 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976739 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976751 5081 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976761 5081 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976772 5081 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976782 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976794 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976805 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976816 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976826 5081 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976836 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976847 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976858 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976868 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976880 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976892 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976906 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976916 5081 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976926 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976935 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976946 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976956 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976968 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976979 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.976990 5081 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977002 5081 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977012 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977022 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977033 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977044 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977056 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977065 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977075 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977086 5081 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977105 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977117 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977134 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977144 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977156 5081 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977167 5081 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977178 5081 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977189 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977199 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977210 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc 
kubenswrapper[5081]: I1003 15:28:22.977220 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.977231 5081 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.979508 5081 scope.go:117] "RemoveContainer" containerID="c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161" Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.981152 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 03 15:28:22 crc kubenswrapper[5081]: I1003 15:28:22.996708 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.021150 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.035487 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.047128 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.060267 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.075838 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.076947 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.087831 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.088402 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.096015 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 15:28:23 crc kubenswrapper[5081]: W1003 15:28:23.100675 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-cffbda8f79fee9d4b3fd8d9009c20b0915e4c456d559afcc0f7698f7ad85dfaf WatchSource:0}: Error finding container cffbda8f79fee9d4b3fd8d9009c20b0915e4c456d559afcc0f7698f7ad85dfaf: Status 404 returned error can't find the container with id cffbda8f79fee9d4b3fd8d9009c20b0915e4c456d559afcc0f7698f7ad85dfaf Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.102355 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03
T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.380807 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.381095 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:24.381051424 +0000 UTC m=+23.346608037 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.381327 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.381352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.381429 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.381493 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.381504 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:24.381492887 +0000 UTC m=+23.347049500 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.381578 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:24.381543128 +0000 UTC m=+23.347099741 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.482367 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.482456 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.482879 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.482929 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.482947 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.482968 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.482995 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.483011 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.483036 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:24.483012102 +0000 UTC m=+23.448568795 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.483095 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:24.483075234 +0000 UTC m=+23.448631847 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.826841 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:23 crc kubenswrapper[5081]: E1003 15:28:23.827020 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.833313 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.834361 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.836807 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.838318 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.840469 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.841888 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.843437 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.847713 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.851741 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.852697 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.853365 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.854250 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.854902 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.855575 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.856232 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.856902 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.857658 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.858153 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.858891 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.859603 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.860223 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.860916 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.861464 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.862301 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.865916 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.866685 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.868061 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.868545 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.869552 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.870049 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.871045 5081 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.871177 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.873220 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.873767 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.874605 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" 
path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.877065 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.877927 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.878536 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.879319 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.880032 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.880518 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.881125 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.881726 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.882314 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.882784 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.883300 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.885264 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.889408 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.890009 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.891046 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.891600 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.892739 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.893421 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.894005 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.958876 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"00ee1c8b6580f2c141f95f22230e6de8c914fa32e019349118f6c07af0071778"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.960714 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.960752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.960768 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e58cab8ba108356cf46d3827be852a62d20a0c28dfe1686eacd925a20cacfa03"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.962379 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.962442 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"cffbda8f79fee9d4b3fd8d9009c20b0915e4c456d559afcc0f7698f7ad85dfaf"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.963969 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.965887 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7"} Oct 03 15:28:23 crc kubenswrapper[5081]: I1003 15:28:23.966238 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.017770 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.037865 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03
T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.066574 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.082807 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.099020 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.119767 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.137050 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.150972 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.162960 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.179386 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.199323 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.212927 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.226579 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.239351 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.391164 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.391267 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.391290 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.391372 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:26.391342486 +0000 UTC m=+25.356899099 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.391397 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.391432 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.391462 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:26.391445849 +0000 UTC m=+25.357002462 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.391502 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:26.39148891 +0000 UTC m=+25.357045593 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.443402 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-7l6c6"] Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.443915 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.445865 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.448181 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.452196 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.472743 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\
\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.490211 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.491894 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5kss\" (UniqueName: \"kubernetes.io/projected/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-kube-api-access-p5kss\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.491952 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.491977 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-hosts-file\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.492021 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492155 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492183 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492196 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492242 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:26.492227113 +0000 UTC m=+25.457783726 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492320 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492338 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492348 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.492373 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:26.492365107 +0000 UTC m=+25.457921720 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.504423 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.516771 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.527687 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.541118 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.553120 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.567600 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.593212 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5kss\" (UniqueName: \"kubernetes.io/projected/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-kube-api-access-p5kss\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.593280 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-hosts-file\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.593624 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-hosts-file\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.613754 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5kss\" (UniqueName: \"kubernetes.io/projected/edef5639-bdcb-4f74-b2dc-d4bb64e24d85-kube-api-access-p5kss\") pod \"node-resolver-7l6c6\" (UID: \"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\") " pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.799273 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-7l6c6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.834512 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.834583 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.834674 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:24 crc kubenswrapper[5081]: E1003 15:28:24.834749 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.883186 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-lkz79"] Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.883613 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-7fljw"] Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.883898 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.884266 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.891090 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.891595 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.891778 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.891998 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.892169 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.892599 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.892873 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.893053 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.893184 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.893331 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.905058 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-sz44f"] Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.905790 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.910445 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.911324 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.926943 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344
826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.948824 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.970792 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.973078 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-7l6c6" event={"ID":"edef5639-bdcb-4f74-b2dc-d4bb64e24d85","Type":"ContainerStarted","Data":"caf476432474aa565225043485f6e730c22510e32e2c744816db9c6a88e5136d"} Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.986116 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:24Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-system-cni-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998622 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-netns\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998654 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-bin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998683 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-proxy-tls\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998707 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cnibin\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998728 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998768 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998794 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-socket-dir-parent\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998816 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-multus-certs\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998843 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbmn5\" (UniqueName: \"kubernetes.io/projected/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-kube-api-access-nbmn5\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998866 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998895 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-cnibin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" 
Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998915 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-os-release\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998937 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-daemon-config\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998958 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj8tk\" (UniqueName: \"kubernetes.io/projected/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-kube-api-access-bj8tk\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.998980 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-mcd-auth-proxy-config\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999004 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-hostroot\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999024 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-conf-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999084 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-796d5\" (UniqueName: \"kubernetes.io/projected/af6b6616-1e4c-4618-890b-7eb334b8c339-kube-api-access-796d5\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999109 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-cni-binary-copy\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999131 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-k8s-cni-cncf-io\") pod \"multus-7fljw\" (UID: 
\"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999154 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-kubelet\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999174 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-system-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999219 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-rootfs\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999278 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-os-release\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999299 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-etc-kubernetes\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999344 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:24 crc kubenswrapper[5081]: I1003 15:28:24.999598 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-multus\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.004894 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.019662 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.036600 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.049871 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.066949 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.082611 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.097182 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100039 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-rootfs\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 
15:28:25.100150 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-os-release\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100173 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-etc-kubernetes\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100185 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-rootfs\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100356 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-etc-kubernetes\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100413 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-os-release\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100481 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-multus\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100507 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100569 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-multus\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100614 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-system-cni-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100632 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100633 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-netns\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100658 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-netns\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100691 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-system-cni-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100693 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-bin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100720 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-cni-bin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100733 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-proxy-tls\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100781 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cnibin\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100803 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100830 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-socket-dir-parent\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100849 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cnibin\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100890 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100907 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-socket-dir-parent\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100930 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbmn5\" (UniqueName: \"kubernetes.io/projected/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-kube-api-access-nbmn5\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.100951 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-multus-certs\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101066 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-cnibin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-os-release\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101103 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-daemon-config\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101123 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101141 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj8tk\" (UniqueName: \"kubernetes.io/projected/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-kube-api-access-bj8tk\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101159 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-hostroot\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101179 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-conf-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101197 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-mcd-auth-proxy-config\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101236 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-cni-binary-copy\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101259 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-k8s-cni-cncf-io\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101308 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-kubelet\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-796d5\" (UniqueName: \"kubernetes.io/projected/af6b6616-1e4c-4618-890b-7eb334b8c339-kube-api-access-796d5\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101388 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-system-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101477 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-system-cni-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101540 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-binary-copy\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101649 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-cnibin\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101691 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-multus-certs\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101732 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-os-release\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101877 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-daemon-config\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.101937 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-hostroot\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102043 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-multus-conf-dir\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102078 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-run-k8s-cni-cncf-io\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102122 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102176 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/af6b6616-1e4c-4618-890b-7eb334b8c339-host-var-lib-kubelet\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102295 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-mcd-auth-proxy-config\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/af6b6616-1e4c-4618-890b-7eb334b8c339-cni-binary-copy\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.102614 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.108354 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-proxy-tls\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.115756 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.124092 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-796d5\" (UniqueName: \"kubernetes.io/projected/af6b6616-1e4c-4618-890b-7eb334b8c339-kube-api-access-796d5\") pod \"multus-7fljw\" (UID: \"af6b6616-1e4c-4618-890b-7eb334b8c339\") " pod="openshift-multus/multus-7fljw" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.133914 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbmn5\" (UniqueName: \"kubernetes.io/projected/e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6-kube-api-access-nbmn5\") pod \"multus-additional-cni-plugins-sz44f\" (UID: \"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\") " pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.133918 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj8tk\" (UniqueName: \"kubernetes.io/projected/fc0e93f4-3228-4f47-8edf-4d12bf3baddd-kube-api-access-bj8tk\") pod \"machine-config-daemon-lkz79\" (UID: \"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\") " pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.135795 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.155141 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.170272 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.183608 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.197775 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.210489 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-7fljw" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.211516 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.225251 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.233085 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sz44f" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.235361 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.257502 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.270921 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.300635 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5bxx6"] Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.301980 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.308980 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309008 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309117 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309023 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309367 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309479 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.309670 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.322538 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.339127 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.357223 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.375164 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.389357 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405284 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405344 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rphc2\" (UniqueName: \"kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405363 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405387 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405411 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405550 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch\") 
pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405631 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405680 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405909 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405944 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.405972 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406012 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406081 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406197 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406225 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406249 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406290 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.406401 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.411893 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.426021 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.446431 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.475703 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.495026 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.507733 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.507888 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rphc2\" (UniqueName: \"kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.507816 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.507960 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.508018 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.507981 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.508060 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.508259 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.508873 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509058 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.508081 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509469 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509500 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509520 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509539 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509590 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509631 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509751 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509911 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509938 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.509993 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510028 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510063 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510160 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510419 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510714 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510802 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510904 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510971 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511001 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.510845 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511073 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511102 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc 
kubenswrapper[5081]: I1003 15:28:25.511145 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511154 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511183 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.511314 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.516155 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.518819 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.532978 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rphc2\" (UniqueName: \"kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2\") pod \"ovnkube-node-5bxx6\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.534193 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.619326 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:25 crc kubenswrapper[5081]: W1003 15:28:25.632982 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode63642c7_8d80_4615_94d9_91d4c41421cc.slice/crio-94290c03451ed4d558da2921c984ebe6c69cbda00566a01a100c670d309d276c WatchSource:0}: Error finding container 94290c03451ed4d558da2921c984ebe6c69cbda00566a01a100c670d309d276c: Status 404 returned error can't find the container with id 94290c03451ed4d558da2921c984ebe6c69cbda00566a01a100c670d309d276c Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.827873 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:25 crc kubenswrapper[5081]: E1003 15:28:25.828089 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.979053 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400" exitCode=0 Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.979157 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.979217 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerStarted","Data":"d49f0f493b06a6c39ac2f2146bd27aa9e942d4018625b71d7fdedab1f16181c2"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.981581 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.981631 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.981645 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"e5d70a1c21b20732603006e7327605c31fec630669be8d7ded8f62d863b3415f"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.983902 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-7l6c6" event={"ID":"edef5639-bdcb-4f74-b2dc-d4bb64e24d85","Type":"ContainerStarted","Data":"d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a"} Oct 03 
15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.985110 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerStarted","Data":"c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.985149 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerStarted","Data":"9e8c525efbfd7bcd487759205bb18dcbc55d74aecfd9f8ffcaeaca6ecaef0c6d"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.986605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.988324 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" exitCode=0 Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.988363 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} Oct 03 15:28:25 crc kubenswrapper[5081]: I1003 15:28:25.988387 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"94290c03451ed4d558da2921c984ebe6c69cbda00566a01a100c670d309d276c"} Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.000960 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:25Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.027333 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.044781 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.060623 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.077167 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.091582 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.110784 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.127453 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.143516 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.161326 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.182292 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.204626 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.219997 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.237832 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.271230 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.296280 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.320055 5081 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.338350 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.355203 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.376864 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.392215 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.408134 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.421200 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.421391 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.421456 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:30.421412765 +0000 UTC m=+29.386969388 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.421499 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.421597 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:30.42155375 +0000 UTC m=+29.387110533 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.421630 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.421790 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.421844 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:30.421837058 +0000 UTC m=+29.387393671 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.423546 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc 
kubenswrapper[5081]: I1003 15:28:26.445973 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:26Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.524208 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.524335 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.524949 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.524968 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.525048 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:30.525027182 +0000 UTC m=+29.490583855 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.525362 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.525456 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.525624 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.525707 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.525841 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:30.525821185 +0000 UTC m=+29.491377858 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.826714 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:26 crc kubenswrapper[5081]: I1003 15:28:26.826724 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.827293 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:26 crc kubenswrapper[5081]: E1003 15:28:26.827307 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.000634 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377" exitCode=0 Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.000788 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377"} Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.009715 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.009777 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.009791 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.009807 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.021877 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.037589 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.048992 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.065368 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.082011 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.101184 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.117798 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.139512 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.157959 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.169342 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.183338 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.197213 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.827457 5081 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:27 crc kubenswrapper[5081]: E1003 15:28:27.828050 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.882947 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.887764 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.893765 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.898240 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.913818 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.928654 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.948780 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.964278 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.975374 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:27 crc kubenswrapper[5081]: I1003 15:28:27.988116 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:27Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.003650 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.016724 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.016775 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.019024 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01" exitCode=0 Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.019098 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.022012 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.042048 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.060242 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.072555 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.085649 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.101366 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.117974 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.139889 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.158356 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.176531 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.197221 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.219837 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.244159 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.271616 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.296039 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.312073 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.312289 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-wg679"] Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.312708 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.314622 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.315034 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.315063 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.317114 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.328777 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.346653 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.359571 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.372404 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.391350 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.412903 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.426674 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.440309 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.446654 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpmt\" (UniqueName: \"kubernetes.io/projected/4efa7d08-97eb-4655-8ee6-be870ebcd421-kube-api-access-dnpmt\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.446764 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4efa7d08-97eb-4655-8ee6-be870ebcd421-serviceca\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.446841 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4efa7d08-97eb-4655-8ee6-be870ebcd421-host\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.455090 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.468157 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.485353 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.501855 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.517973 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.535302 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.547472 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.547901 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpmt\" (UniqueName: \"kubernetes.io/projected/4efa7d08-97eb-4655-8ee6-be870ebcd421-kube-api-access-dnpmt\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.547929 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4efa7d08-97eb-4655-8ee6-be870ebcd421-serviceca\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.548235 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4efa7d08-97eb-4655-8ee6-be870ebcd421-host\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.548327 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4efa7d08-97eb-4655-8ee6-be870ebcd421-host\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.549772 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4efa7d08-97eb-4655-8ee6-be870ebcd421-serviceca\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.571283 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpmt\" (UniqueName: \"kubernetes.io/projected/4efa7d08-97eb-4655-8ee6-be870ebcd421-kube-api-access-dnpmt\") pod \"node-ca-wg679\" (UID: \"4efa7d08-97eb-4655-8ee6-be870ebcd421\") " pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.638957 5081 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.641455 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.641517 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.641535 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.641787 5081 kubelet_node_status.go:76] "Attempting to 
register node" node="crc" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.648982 5081 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.649291 5081 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.650475 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.650507 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.650518 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.650538 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.650551 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.666427 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.670626 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.670671 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.670685 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.670704 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.670718 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.688073 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.692044 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.692080 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.692091 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.692106 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.692118 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.706160 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.710821 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.710867 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
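The "NetworkPluginNotReady" condition recorded above reduces to a single filesystem check: the CNI configuration directory named in the message is still empty because the network plugin has not written its config yet. A minimal standalone Go sketch of that check follows (this is illustrative, not kubelet code; the path is taken from the log message itself):

package main

import (
	"fmt"
	"os"
)

func main() {
	// Path copied from the kubelet message above.
	const confDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if len(entries) == 0 {
		// This is the state the node is stuck in: no CNI config, so
		// the runtime reports NetworkReady=false and the node stays NotReady.
		fmt.Println("no CNI configuration file in", confDir)
		return
	}
	for _, e := range entries {
		fmt.Println("found CNI config:", e.Name())
	}
}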
event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.710883 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.710906 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.710925 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.723329 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-wg679" Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.727308 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.732647 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.732697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
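The "will retry" / "exceeds retry count" pair in these entries reflects a bounded retry loop around the node-status PATCH; in the upstream kubelet the attempt budget is the nodeStatusUpdateRetry constant (5). A minimal sketch of that pattern, with tryPatchStatus as a hypothetical stand-in for the real API call (every attempt here fails the same way the log shows, at the webhook's expired certificate):

package main

import (
	"errors"
	"fmt"
)

// Attempt budget mirroring the upstream kubelet constant.
const nodeStatusUpdateRetry = 5

// tryPatchStatus is a stand-in for the node-status PATCH; in this log every
// attempt dies at the admission webhook's expired serving certificate.
func tryPatchStatus() error {
	return errors.New("x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryPatchStatus(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	// Matches the terminal log line below.
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}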
event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.732709 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.732726 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.732740 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: W1003 15:28:28.740624 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4efa7d08_97eb_4655_8ee6_be870ebcd421.slice/crio-f222d84fe40587cdaedd2fc24b89e01f4c106a08f931f38156dcf74471c582df WatchSource:0}: Error finding container f222d84fe40587cdaedd2fc24b89e01f4c106a08f931f38156dcf74471c582df: Status 404 returned error can't find the container with id f222d84fe40587cdaedd2fc24b89e01f4c106a08f931f38156dcf74471c582df Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.752391 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:28Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.752505 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.758637 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
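Every attempt above fails at the same point: the network-node-identity webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-10-03. A small diagnostic sketch one might run on the node to confirm this (assumptions: the endpoint is reachable at 127.0.0.1:9743 as in the log; verification is skipped deliberately so the handshake completes and the certificate dates the kubelet rejected can be printed):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing webhook Post in the log.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	// Print validity dates for each certificate the server presented.
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			time.Now().After(cert.NotAfter))
	}
}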
event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.758667 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.758676 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.758692 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.758702 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.827630 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.827708 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.827792 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:28 crc kubenswrapper[5081]: E1003 15:28:28.827908 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.862419 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.862474 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.862488 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.862508 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.862521 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.965145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.965187 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.965198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.965219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:28 crc kubenswrapper[5081]: I1003 15:28:28.965231 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:28Z","lastTransitionTime":"2025-10-03T15:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.026130 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493" exitCode=0 Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.026213 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.028283 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wg679" event={"ID":"4efa7d08-97eb-4655-8ee6-be870ebcd421","Type":"ContainerStarted","Data":"52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.028313 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wg679" event={"ID":"4efa7d08-97eb-4655-8ee6-be870ebcd421","Type":"ContainerStarted","Data":"f222d84fe40587cdaedd2fc24b89e01f4c106a08f931f38156dcf74471c582df"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.051394 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.069288 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.074117 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.074173 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.074185 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.074207 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.074231 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.090758 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.108014 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.129160 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.141885 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.157016 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.168849 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.177481 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.177791 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.177849 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.177875 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.177886 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.188164 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.215683 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.226753 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.240526 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.254143 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.269194 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.281578 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.281630 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.281649 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.281676 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.281722 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.287465 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.303533 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.318826 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.336778 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.359130 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.370814 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.384123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.384159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.384170 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.384186 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.384198 5081 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.385401 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.400518 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.417146 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.429509 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.443598 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.455012 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.468052 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.481090 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:29Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.487652 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.487686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.487694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.487710 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.487719 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.589960 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.589997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.590007 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.590022 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.590031 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.692851 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.692970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.693027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.693052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.693075 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.795944 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.795995 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.796007 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.796027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.796040 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.827667 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:29 crc kubenswrapper[5081]: E1003 15:28:29.827885 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.898907 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.898961 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.898974 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.898994 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:29 crc kubenswrapper[5081]: I1003 15:28:29.899008 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:29Z","lastTransitionTime":"2025-10-03T15:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.001967 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.002021 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.002033 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.002053 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.002065 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.035842 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.038346 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b" exitCode=0 Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.038384 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.056081 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.070326 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.085209 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.103550 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.109826 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.109915 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.109933 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.109965 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.109980 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.114009 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.131451 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.143223 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.156833 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.172910 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.193531 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.205535 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.213690 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.213741 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.213754 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.213773 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.213789 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.222533 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.238795 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.253363 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.316794 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.316849 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.316860 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.316881 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.316893 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.419407 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.419448 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.419460 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.419477 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.419490 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.468303 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.468430 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.468473 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:38.468449773 +0000 UTC m=+37.434006386 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.468502 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.468550 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.468619 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:38.468609958 +0000 UTC m=+37.434166571 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.468713 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.468852 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:38.468821534 +0000 UTC m=+37.434378167 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.522443 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.522494 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.522504 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.522527 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.522540 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.570021 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.570080 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570202 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570219 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570231 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570286 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-03 15:28:38.570270978 +0000 UTC m=+37.535827591 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570320 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570373 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570391 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.570466 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:38.570440143 +0000 UTC m=+37.535996806 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.625299 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.625344 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.625356 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.625376 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.625389 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.735324 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.735398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.735419 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.735444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.735462 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.826975 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.826977 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.827173 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:30 crc kubenswrapper[5081]: E1003 15:28:30.827315 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.840178 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.840217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.840234 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.840257 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.840274 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.943913 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.943949 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.943976 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.943994 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:30 crc kubenswrapper[5081]: I1003 15:28:30.944006 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:30Z","lastTransitionTime":"2025-10-03T15:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.045925 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.045971 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.045984 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.046004 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.046018 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.046274 5081 generic.go:334] "Generic (PLEG): container finished" podID="e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6" containerID="160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81" exitCode=0 Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.046324 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerDied","Data":"160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.071953 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z 
is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.084254 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.098981 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.116187 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.132359 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.148321 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.150197 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.150231 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.150241 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.150256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.150269 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.159878 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.172495 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.187942 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.205118 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.222948 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.237906 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.252492 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.252542 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.252553 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.252591 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.252604 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.254264 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.267996 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.356385 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.356432 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.356444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.356462 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.356476 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.458866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.459473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.459718 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.459927 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.460030 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.563214 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.563246 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.563257 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.563275 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.563290 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.665494 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.665542 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.665573 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.665596 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.665612 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.768880 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.768935 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.768947 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.768965 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.768979 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.828241 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:31 crc kubenswrapper[5081]: E1003 15:28:31.828507 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.843716 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.860696 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.871542 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.872278 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.872300 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.872326 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.872340 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.881368 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.905737 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.919409 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.935145 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.948834 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.961807 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.973760 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.974930 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.974969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.974982 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.974999 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.975010 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:31Z","lastTransitionTime":"2025-10-03T15:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.985988 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:31 crc kubenswrapper[5081]: I1003 15:28:31.997943 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.009310 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.019334 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.030754 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.055643 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" event={"ID":"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6","Type":"ContainerStarted","Data":"8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.063517 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.063875 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077300 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077693 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077735 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077771 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077790 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.077802 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.100853 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.101979 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.118609 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.133530 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.158466 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.170954 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.180730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.180786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.180796 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.180812 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.180823 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.185795 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.198508 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.214162 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.227991 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.244961 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.257879 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.269094 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.280174 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.282940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.282988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.283001 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.283019 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.283033 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.293059 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.307144 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.321686 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.340168 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.366293 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ba
f5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.386154 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.386211 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.386225 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.386250 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.386266 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.391664 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkub
e-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.405188 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.420403 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.434782 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.448636 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.467353 5081 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.483166 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.488789 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.488843 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.488863 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.488888 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.488906 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.498778 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.512660 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.591896 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.591957 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.591974 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.591998 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.592017 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.695237 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.695320 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.695347 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.695380 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.695406 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.798493 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.798606 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.798622 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.798648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.798663 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.826796 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.826877 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:32 crc kubenswrapper[5081]: E1003 15:28:32.826959 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:32 crc kubenswrapper[5081]: E1003 15:28:32.827091 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.901411 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.901457 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.901468 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.901486 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:32 crc kubenswrapper[5081]: I1003 15:28:32.901498 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:32Z","lastTransitionTime":"2025-10-03T15:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.004661 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.004730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.004748 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.004775 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.004796 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.073020 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.073798 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.108227 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.108342 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.108359 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.108380 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.108401 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.150437 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.168266 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.183859 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.196669 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.211742 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.211805 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.211827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.211856 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.211882 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.213705 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.231786 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.252869 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.272976 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.290395 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ba
f5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.309222 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b72c48e8dcd6d63b0e398fc1a8af27486a0b103
8061f5df08ae7c0f685d7071\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.319180 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.319234 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.319249 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.319273 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.319298 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.337843 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.365166 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.399046 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.413054 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.422356 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.422415 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.422426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.422443 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.422454 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.427825 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:33Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.525515 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.525547 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.525579 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.525595 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.525604 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.628163 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.628204 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.628216 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.628236 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.628250 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.742616 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.742697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.742715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.742740 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.742759 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.827156 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:33 crc kubenswrapper[5081]: E1003 15:28:33.827293 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.845512 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.845578 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.845590 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.845609 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.845640 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.948250 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.948298 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.948308 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.948327 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:33 crc kubenswrapper[5081]: I1003 15:28:33.948340 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:33Z","lastTransitionTime":"2025-10-03T15:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.051072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.051110 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.051120 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.051138 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.051149 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.076308 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.154398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.154433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.154443 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.154458 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.154470 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.257667 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.257700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.257712 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.257730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.257744 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.360717 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.360771 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.360783 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.360803 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.360816 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.464097 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.464131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.464141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.464157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.464171 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.567629 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.567692 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.567703 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.567745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.567760 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.671062 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.671120 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.671131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.671154 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.671166 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.774538 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.774600 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.774615 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.774632 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.774645 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.827638 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.828059 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:34 crc kubenswrapper[5081]: E1003 15:28:34.828445 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:34 crc kubenswrapper[5081]: E1003 15:28:34.828625 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.877320 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.877377 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.877393 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.877410 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.877420 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.980589 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.980647 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.980658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.980686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:34 crc kubenswrapper[5081]: I1003 15:28:34.980702 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:34Z","lastTransitionTime":"2025-10-03T15:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.082712 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/0.log" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.083639 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.083701 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.083725 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.083756 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.083784 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.087250 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071" exitCode=1 Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.087307 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071"} Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.088424 5081 scope.go:117] "RemoveContainer" containerID="3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.106662 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.122419 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.138429 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.151093 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.167172 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.182514 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.186713 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.186746 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.186754 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.186769 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.186780 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.199077 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.218312 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.245524 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:34Z\\\",\\\"message\\\":\\\"0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.402856 6398 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.403098 6398 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.403387 6398 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.403554 6398 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 15:28:34.403705 6398 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.403914 6398 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.404326 6398 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298
aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.258751 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.274434 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.290392 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.290830 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.290844 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.290877 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.290891 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.298317 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.317200 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.331729 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.394402 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.394478 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.394492 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.394513 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.394550 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.498465 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.499009 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.499026 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.499047 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.499063 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.603034 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.603093 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.603108 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.603131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.603144 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.706392 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.706440 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.706452 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.706473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.706487 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.809042 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.809117 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.809144 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.809177 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.809200 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.826853 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:35 crc kubenswrapper[5081]: E1003 15:28:35.826981 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.913045 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.913115 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.913138 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.913169 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:35 crc kubenswrapper[5081]: I1003 15:28:35.913195 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:35Z","lastTransitionTime":"2025-10-03T15:28:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.016137 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.016195 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.016211 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.016232 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.016246 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.093068 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/1.log"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.094203 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/0.log"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.097533 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2" exitCode=1
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.097606 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2"}
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.097659 5081 scope.go:117] "RemoveContainer" containerID="3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.098521 5081 scope.go:117] "RemoveContainer" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2"
Oct 03 15:28:36 crc kubenswrapper[5081]: E1003 15:28:36.098795 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.116811 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.119099 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.119198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.119224 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.119250 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.119269 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.131391 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.146611 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.163520 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.181964 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b72c48e8dcd6d63b0e398fc1a8af27486a0b1038061f5df08ae7c0f685d7071\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:34Z\\\",\\\"message\\\":\\\"0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.402856 6398 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.403098 6398 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 15:28:34.403387 6398 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.403554 6398 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 15:28:34.403705 6398 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.403914 6398 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 15:28:34.404326 6398 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.192818 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.206394 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.220283 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.222279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.222336 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.222352 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.222374 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.222389 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.237098 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.252079 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.265668 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.278852 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.295295 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46eb
de0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.306361 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:36Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.325403 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.325444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.325455 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.325471 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.325484 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.428345 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.428400 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.428412 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.428428 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.428438 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.531464 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.531838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.531961 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.532081 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.532244 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.635588 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.635865 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.635955 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.636047 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.636155 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.739510 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.739895 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.739988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.740090 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.740180 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.827619 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.828123 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:36 crc kubenswrapper[5081]: E1003 15:28:36.828347 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:36 crc kubenswrapper[5081]: E1003 15:28:36.828537 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.842851 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.842950 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.842969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.842998 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.843016 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.945599 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.945649 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.945660 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.945678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:36 crc kubenswrapper[5081]: I1003 15:28:36.945690 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:36Z","lastTransitionTime":"2025-10-03T15:28:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.048780 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.048836 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.048848 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.048866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.048879 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.107139 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/1.log" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.112423 5081 scope.go:117] "RemoveContainer" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2" Oct 03 15:28:37 crc kubenswrapper[5081]: E1003 15:28:37.112680 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.125413 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.139505 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.151841 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.151869 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.151877 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.151892 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.151902 5081 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.153705 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.172583 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.189158 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.202084 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.217248 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff0
33d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.238671 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba
3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.252969 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.254614 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.254656 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.254670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.254689 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.254742 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.271890 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.287190 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.303869 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.317532 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.331152 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.356932 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.356969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.356979 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.357016 5081 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.357031 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.460394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.460474 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.460498 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.460530 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.460554 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.562912 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.562956 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.562970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.562988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.563001 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.614276 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.666096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.666135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.666143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.666162 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.666172 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.677134 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"] Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.677716 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.680693 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.681094 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.699202 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.713364 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.727201 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.741748 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.750750 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.750821 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.750873 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.751041 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dp62\" (UniqueName: \"kubernetes.io/projected/ff1030e4-8512-4273-9a04-2cc7d89903ad-kube-api-access-6dp62\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.761302 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.774109 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.774423 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.774447 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.774477 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.774502 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.784415 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:
28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.806535 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba
3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.820970 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.826681 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:37 crc kubenswrapper[5081]: E1003 15:28:37.826833 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.836033 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.848777 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.851870 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.851961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: 
I1003 15:28:37.852030 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.852759 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.852982 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.853096 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dp62\" (UniqueName: \"kubernetes.io/projected/ff1030e4-8512-4273-9a04-2cc7d89903ad-kube-api-access-6dp62\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.859928 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ff1030e4-8512-4273-9a04-2cc7d89903ad-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.863614 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.869489 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dp62\" (UniqueName: \"kubernetes.io/projected/ff1030e4-8512-4273-9a04-2cc7d89903ad-kube-api-access-6dp62\") pod \"ovnkube-control-plane-749d76644c-76b6m\" (UID: \"ff1030e4-8512-4273-9a04-2cc7d89903ad\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878216 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878265 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878276 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878293 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878308 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.878696 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.892970 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.903701 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.918330 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:37Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.980384 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.980433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.980449 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.980472 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.980488 5081 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:37Z","lastTransitionTime":"2025-10-03T15:28:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:37 crc kubenswrapper[5081]: I1003 15:28:37.992924 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.084099 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.084146 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.084157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.084178 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.084198 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.116289 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" event={"ID":"ff1030e4-8512-4273-9a04-2cc7d89903ad","Type":"ContainerStarted","Data":"17ef3829065e84dfb0804c24ed3f8f8d1d1ac7639fcc208c4b0d04ee10f44d03"}
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.117449 5081 scope.go:117] "RemoveContainer" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2"
Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.117735 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.186220 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.186295 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.186312 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.186339 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.186389 5081 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.288542 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.288593 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.288602 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.288619 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.288630 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.391622 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.391692 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.391713 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.391744 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.391767 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.494691 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.494732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.494745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.494782 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.494795 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.560938 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.561058 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.561137 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:28:54.561097547 +0000 UTC m=+53.526654170 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.561159 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.561209 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.561248 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:54.561224051 +0000 UTC m=+53.526780684 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.561353 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.561413 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:54.561403436 +0000 UTC m=+53.526960069 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.598170 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.598230 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.598239 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.598256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.598267 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.661953 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.662011 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662125 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662141 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662155 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662214 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-03 15:28:54.662201341 +0000 UTC m=+53.627757954 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662252 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662296 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662314 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.662403 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:54.662379686 +0000 UTC m=+53.627936359 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.701274 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.701329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.701344 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.701365 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.701381 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.793269 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zdszj"] Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.793882 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.793975 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.803776 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.803857 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.803875 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.803898 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.803914 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.813336 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.826631 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.826631 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.826857 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.826937 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.832295 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.832344 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.832355 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.832373 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.832389 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.836550 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.851009 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854758 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854814 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854835 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854850 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.854783 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.864523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l78p5\" (UniqueName: \"kubernetes.io/projected/660149c8-a5c7-4581-abae-89611dafa042-kube-api-access-l78p5\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.864599 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc 
kubenswrapper[5081]: I1003 15:28:38.867703 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 
2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.870801 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.874230 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.874285 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.874297 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.874316 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.874336 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.882689 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.887119 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.893977 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.894014 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.894027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.894048 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.894062 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.899833 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a
8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.908653 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{...}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.912902 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.912976 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.912988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.913008 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.913383 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.920076 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.926255 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{...}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.926440 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.928586 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.928623 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.928633 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.928651 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.928662 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:38Z","lastTransitionTime":"2025-10-03T15:28:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.935199 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.951226 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.965676 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l78p5\" (UniqueName: \"kubernetes.io/projected/660149c8-a5c7-4581-abae-89611dafa042-kube-api-access-l78p5\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.965724 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.965866 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: E1003 15:28:38.965932 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:39.465909352 +0000 UTC m=+38.431465965 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.970551 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba
3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.983431 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.984585 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l78p5\" (UniqueName: \"kubernetes.io/projected/660149c8-a5c7-4581-abae-89611dafa042-kube-api-access-l78p5\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:38 crc kubenswrapper[5081]: I1003 15:28:38.998501 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:38Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.016379 5081 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.032160 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.032212 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.032221 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.032238 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.032247 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.037019 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.056973 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.072262 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.121581 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" event={"ID":"ff1030e4-8512-4273-9a04-2cc7d89903ad","Type":"ContainerStarted","Data":"832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.121638 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" event={"ID":"ff1030e4-8512-4273-9a04-2cc7d89903ad","Type":"ContainerStarted","Data":"308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.134405 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.134473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.134489 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.134514 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.134532 5081 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.140710 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.153291 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.164651 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 
15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.174938 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.187505 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.200805 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.213416 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.228672 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ba
f5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.237476 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.237509 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.237522 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.237543 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.237575 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.253331 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.267405 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.287444 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.306481 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.322909 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.336151 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.340651 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.340716 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.340733 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.340775 5081 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.340792 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.353201 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-res
ources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery 
information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.367196 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.418248 5081 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.437483 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a604
5c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\
\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.444186 5081 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.444241 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.444255 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.444276 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.444292 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.466236 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba
3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.471644 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:39 crc kubenswrapper[5081]: E1003 15:28:39.471814 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:39 crc kubenswrapper[5081]: E1003 15:28:39.471892 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:40.471866501 +0000 UTC m=+39.437423154 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.480957 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.495124 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.507696 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.519679 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.534162 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548038 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548077 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548087 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548110 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548121 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.548507 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.561633 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.575232 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.591773 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.605592 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.617011 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.628629 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.645128 5081 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30
d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.651047 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.651271 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.651425 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.651627 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.651833 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.659530 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:39Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.755781 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.756241 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.756256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.756276 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.756290 5081 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.827842 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:39 crc kubenswrapper[5081]: E1003 15:28:39.828369 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.859423 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.859852 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.859932 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.860011 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.860082 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.963819 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.963879 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.963895 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.963920 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:39 crc kubenswrapper[5081]: I1003 15:28:39.963941 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:39Z","lastTransitionTime":"2025-10-03T15:28:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.067209 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.067341 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.067360 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.067388 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.067409 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.171074 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.171154 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.171173 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.171204 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.171225 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.274900 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.274983 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.274992 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.275012 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.275024 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.377870 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.377924 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.377940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.377964 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.377978 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.481877 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.482001 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.482014 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.482037 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.482051 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.484282 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:40 crc kubenswrapper[5081]: E1003 15:28:40.484463 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:40 crc kubenswrapper[5081]: E1003 15:28:40.484521 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:42.484502631 +0000 UTC m=+41.450059244 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.584755 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.584805 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.584818 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.584834 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.584846 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.687919 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.687980 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.688000 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.688024 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.688042 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.790314 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.790362 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.790372 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.790389 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.790401 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.827209 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.827315 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.827247 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:40 crc kubenswrapper[5081]: E1003 15:28:40.827429 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:40 crc kubenswrapper[5081]: E1003 15:28:40.827552 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:40 crc kubenswrapper[5081]: E1003 15:28:40.827661 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.894147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.894218 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.894257 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.894288 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.894313 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.998426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.998856 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.999012 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.999147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:40 crc kubenswrapper[5081]: I1003 15:28:40.999282 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:40Z","lastTransitionTime":"2025-10-03T15:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.103422 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.103499 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.103524 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.103554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.103609 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.207219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.208103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.208248 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.208398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.208524 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.313061 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.313162 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.313200 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.313296 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.313320 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.417806 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.417871 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.417890 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.417919 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.417939 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.520992 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.521048 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.521062 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.521088 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.521103 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.624152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.624197 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.624210 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.624230 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.624240 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.727380 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.727464 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.727489 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.727524 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.727547 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.826942 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:41 crc kubenswrapper[5081]: E1003 15:28:41.827516 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.830180 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.830265 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.830293 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.830328 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.830351 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.850845 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.867613 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.886036 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.902509 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.933123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.933196 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.933221 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.933254 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.933277 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:41Z","lastTransitionTime":"2025-10-03T15:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.955661 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:41 crc kubenswrapper[5081]: I1003 15:28:41.990244 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:41Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.036341 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.036396 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.036409 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.036429 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.036446 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.037167 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.055357 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.075676 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.090272 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.106171 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.121589 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.135906 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.138767 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.138816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.138830 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.138850 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.138864 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.151838 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.169921 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.185775 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:42Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.242211 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.242298 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.242319 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.242347 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.242366 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.345943 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.346010 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.346027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.346055 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.346073 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.449114 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.449147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.449159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.449179 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.449191 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.519077 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:42 crc kubenswrapper[5081]: E1003 15:28:42.519341 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:42 crc kubenswrapper[5081]: E1003 15:28:42.519433 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:46.519403043 +0000 UTC m=+45.484959696 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.552376 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.552417 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.552435 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.552460 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.552481 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.656147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.656191 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.656201 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.656217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.656230 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.760132 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.760213 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.760240 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.760272 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.760301 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.826914 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.826965 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.826941 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:28:42 crc kubenswrapper[5081]: E1003 15:28:42.827100 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:28:42 crc kubenswrapper[5081]: E1003 15:28:42.827209 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:28:42 crc kubenswrapper[5081]: E1003 15:28:42.827414 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.863444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.863501 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.863512 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.863533 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.863545 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.966295 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.966329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.966339 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.966356 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:42 crc kubenswrapper[5081]: I1003 15:28:42.966365 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:42Z","lastTransitionTime":"2025-10-03T15:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.068867 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.068908 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.068921 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.068941 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.068955 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.171754 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.171822 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.171842 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.171870 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.171888 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.275378 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.275426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.275435 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.275458 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.275471 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.377953 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.377986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.377997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.378018 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.378029 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.481096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.481145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.481159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.481180 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.481195 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.584598 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.584663 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.584675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.584694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.584707 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.687736 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.687831 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.687844 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.687866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.687878 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.790753 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.790805 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.790819 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.790841 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.790857 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.827277 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:43 crc kubenswrapper[5081]: E1003 15:28:43.827421 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.894279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.894349 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.894362 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.894434 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.894451 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.998939 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.998990 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.999005 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.999025 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:43 crc kubenswrapper[5081]: I1003 15:28:43.999038 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:43Z","lastTransitionTime":"2025-10-03T15:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.101895 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.101950 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.101964 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.101986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.102003 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.205771 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.205831 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.205851 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.205878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.205897 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.309492 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.309534 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.309545 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.309575 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.309587 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.412001 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.412052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.412099 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.412123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.412138 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.515445 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.515484 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.515495 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.515518 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.515530 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.618744 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.618803 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.618816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.618838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.618853 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.721940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.721989 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.722004 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.722027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.722040 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.824893 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.824969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.824988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.825017 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.825036 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.827172 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.827216 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:28:44 crc kubenswrapper[5081]: E1003 15:28:44.827290 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.827323 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:28:44 crc kubenswrapper[5081]: E1003 15:28:44.827461 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:28:44 crc kubenswrapper[5081]: E1003 15:28:44.827614 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.927981 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.928035 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.928074 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.928093 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:44 crc kubenswrapper[5081]: I1003 15:28:44.928106 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:44Z","lastTransitionTime":"2025-10-03T15:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.031000 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.031033 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.031040 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.031058 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.031068 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.133845 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.133916 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.133925 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.133939 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.133951 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.236915 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.236966 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.236981 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.237005 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.237021 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.339777 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.339810 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.339818 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.339834 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.339847 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.443817 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.443866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.443876 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.443893 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.443906 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.547414 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.547467 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.547479 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.547498 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.547512 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.652787 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.652861 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.652881 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.652908 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.652931 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.756326 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.756405 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.756424 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.756448 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.756469 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.827941 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:45 crc kubenswrapper[5081]: E1003 15:28:45.828142 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.859252 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.859315 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.859332 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.859357 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.859376 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.963113 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.963254 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.963287 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.963322 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:45 crc kubenswrapper[5081]: I1003 15:28:45.963348 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:45Z","lastTransitionTime":"2025-10-03T15:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.066540 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.066685 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.066707 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.066737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.066756 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.174146 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.174193 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.174204 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.174220 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.174233 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.276726 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.276779 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.276796 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.276820 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.276836 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.379764 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.379827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.379842 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.379866 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.379878 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.482967 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.483046 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.483065 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.483093 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.483114 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.570827 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:28:46 crc kubenswrapper[5081]: E1003 15:28:46.571120 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 03 15:28:46 crc kubenswrapper[5081]: E1003 15:28:46.571243 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:28:54.571213709 +0000 UTC m=+53.536770422 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.586968 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.587029 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.587041 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.587063 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.587078 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.689814 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.689858 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.689869 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.689886 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.689899 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.793259 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.793325 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.793342 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.793369 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.793386 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.826767 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.826891 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:28:46 crc kubenswrapper[5081]: E1003 15:28:46.826919 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.827037 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:28:46 crc kubenswrapper[5081]: E1003 15:28:46.827160 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:28:46 crc kubenswrapper[5081]: E1003 15:28:46.827248 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.896806 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.897096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.897227 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.897321 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:46 crc kubenswrapper[5081]: I1003 15:28:46.897406 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:46Z","lastTransitionTime":"2025-10-03T15:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.000271 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.000350 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.000366 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.000393 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.000406 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.104737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.105075 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.105261 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.105697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.105883 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.209858 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.209918 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.209935 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.209959 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.209975 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.313091 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.313170 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.313189 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.313219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.313239 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.416460 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.416534 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.416586 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.416614 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.416633 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.520550 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.520641 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.520658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.520683 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.520701 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.624394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.624475 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.624494 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.624521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.624537 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.728426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.728470 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.728481 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.728499 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.728512 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.827797 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:47 crc kubenswrapper[5081]: E1003 15:28:47.828026 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.831737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.831789 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.831807 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.831867 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.831886 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.935066 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.935158 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.935181 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.935225 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:47 crc kubenswrapper[5081]: I1003 15:28:47.935251 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:47Z","lastTransitionTime":"2025-10-03T15:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.038366 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.038422 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.038431 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.038450 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.038463 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.141885 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.141929 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.141938 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.141954 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.141964 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.244701 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.245114 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.245316 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.245748 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.245924 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.349541 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.350005 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.350218 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.350432 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.350738 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.454116 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.454197 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.454219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.454252 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.454278 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.556919 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.557195 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.557263 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.557322 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.557374 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.661069 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.661484 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.661686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.661835 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.661963 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.765191 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.765606 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.765787 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.765954 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.766101 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.827547 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.827817 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:28:48 crc kubenswrapper[5081]: E1003 15:28:48.827988 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.828022 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:28:48 crc kubenswrapper[5081]: E1003 15:28:48.828244 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:28:48 crc kubenswrapper[5081]: E1003 15:28:48.828368 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.869084 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.869152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.869174 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.869203 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.869223 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.972890 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.972955 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.972975 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.973002 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:48 crc kubenswrapper[5081]: I1003 15:28:48.973022 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:48Z","lastTransitionTime":"2025-10-03T15:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.076538 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.076638 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.076662 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.076692 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.076717 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.179824 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.179878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.179894 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.179919 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.179936 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.257008 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.257075 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.257098 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.257128 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.257150 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.274237 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.280145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.280232 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.280253 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.280277 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.280295 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.299197 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.305302 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.305393 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.305424 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.305459 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.305484 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.327301 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.332813 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.332960 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.333056 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.333144 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.333218 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.349546 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.354147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.354205 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.354223 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.354248 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.354314 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.371039 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.371277 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.373507 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.373586 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.373605 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.373630 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.373649 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.476976 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.477694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.477739 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.477761 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.477777 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.581024 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.581106 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.581144 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.581182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.581208 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.684226 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.684292 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.684310 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.684335 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.684352 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.788967 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.789070 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.789096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.789126 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.789153 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.795152 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.806489 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.820349 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.826903 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:49 crc kubenswrapper[5081]: E1003 15:28:49.827065 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.842251 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.870186 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.887248 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.894206 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.894287 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.894302 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.894324 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.894338 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.905342 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.921452 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is 
after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.938626 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.960409 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.974931 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.991252 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:49Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.996252 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.996314 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.996330 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.996350 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:49 crc kubenswrapper[5081]: I1003 15:28:49.996688 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:49Z","lastTransitionTime":"2025-10-03T15:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.008879 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.032327 5081 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.050471 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.068449 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.084540 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.099222 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.099287 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.099305 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.099325 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.099339 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.101044 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:50Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.202985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.203050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.203063 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.203112 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.203129 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.306502 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.306599 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.306613 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.306631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.306644 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.409793 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.409858 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.409906 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.409934 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.409949 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.512597 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.512647 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.512659 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.512678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.512692 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.615867 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.615956 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.615976 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.616010 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.616030 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.719373 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.719454 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.719475 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.719504 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.719524 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.822906 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.822990 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.823027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.823060 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.823086 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.827579 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.827643 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.827682 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:50 crc kubenswrapper[5081]: E1003 15:28:50.827753 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:50 crc kubenswrapper[5081]: E1003 15:28:50.827869 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:50 crc kubenswrapper[5081]: E1003 15:28:50.828043 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.925894 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.925962 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.925985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.926014 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:50 crc kubenswrapper[5081]: I1003 15:28:50.926036 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:50Z","lastTransitionTime":"2025-10-03T15:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.028539 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.028607 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.028625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.028645 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.028660 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.132060 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.132122 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.132136 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.132155 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.132169 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.235418 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.235474 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.235485 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.235505 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.235518 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.338645 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.338700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.338712 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.338732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.338746 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.441943 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.442018 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.442033 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.442056 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.442071 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.545159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.545212 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.545230 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.545254 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.545271 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.648797 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.648857 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.648869 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.648887 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.648902 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.751163 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.751224 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.751246 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.751270 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.751289 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.827602 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:51 crc kubenswrapper[5081]: E1003 15:28:51.827861 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.831834 5081 scope.go:117] "RemoveContainer" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.852053 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.855722 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.855745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.855756 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.855775 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.855784 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.866151 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.885100 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 
15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.900551 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.920770 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.942925 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960171 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960193 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960208 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:51Z","lastTransitionTime":"2025-10-03T15:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.960742 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:51 crc kubenswrapper[5081]: I1003 15:28:51.984036 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:51Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.017470 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.032513 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.049004 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.063485 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.063530 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.063541 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.063578 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.063592 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.064720 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.081454 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.096337 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.110819 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.127775 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.142609 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.165902 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.165945 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.165955 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.165977 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.165989 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.175912 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/1.log" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.185251 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.206616 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f894
5c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.220422 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.238151 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.257295 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.268891 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.268932 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.268947 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.268972 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.268984 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.270304 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.289389 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 
15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.305825 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.326637 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.340934 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.355818 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.371762 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.371804 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.371816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.371833 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.371844 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.377741 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.394494 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.417214 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.432000 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.449612 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.466602 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.475330 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.475382 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.475398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.475421 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.475435 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.484094 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.578713 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.578761 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.578776 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.578798 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.578813 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.681624 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.681694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.681711 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.681737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.681755 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.785673 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.785745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.785763 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.786205 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.786264 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.827215 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.827295 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:52 crc kubenswrapper[5081]: E1003 15:28:52.827414 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:52 crc kubenswrapper[5081]: E1003 15:28:52.827549 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.827744 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:52 crc kubenswrapper[5081]: E1003 15:28:52.827875 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.889630 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.889694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.889715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.889742 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.889761 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.993862 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.993932 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.993955 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.993985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:52 crc kubenswrapper[5081]: I1003 15:28:52.994004 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:52Z","lastTransitionTime":"2025-10-03T15:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.097209 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.097367 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.097394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.097427 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.097449 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.196824 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/2.log"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.198799 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/1.log"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.202663 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.202742 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.202768 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.202800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.202825 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.204444 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66" exitCode=1
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.204517 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.204629 5081 scope.go:117] "RemoveContainer" containerID="782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.206169 5081 scope.go:117] "RemoveContainer" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66"
Oct 03 15:28:53 crc kubenswrapper[5081]: E1003 15:28:53.206526 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.233498 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.254006 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.273890 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.294212 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.307014 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.307097 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.307123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.307161 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.307186 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.313275 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.330047 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.348091 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.366690 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.383621 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.403699 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.410433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.410523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.410554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.410625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.410653 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.420642 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.446123 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z"
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.473074 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} 
name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348
ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.492263 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.513507 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.513612 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.513632 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.513660 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.513680 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.516987 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.537273 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.560156 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:53Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.616131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.616192 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.616207 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.616227 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.616240 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:53Z","lastTransitionTime":"2025-10-03T15:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:28:53 crc kubenswrapper[5081]: I1003 15:28:53.827408 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:28:53 crc kubenswrapper[5081]: E1003 15:28:53.827595 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.212208 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/2.log"
Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.549550 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:54Z","lastTransitionTime":"2025-10-03T15:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.583238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.583408 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.583503 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.583602 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.583769 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.583849 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:10.583823756 +0000 UTC m=+69.549380409 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.584472 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:29:26.584452154 +0000 UTC m=+85.550008807 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.584553 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.584632 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:26.584617199 +0000 UTC m=+85.550173852 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.584709 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.584752 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:26.584739052 +0000 UTC m=+85.550295695 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.652495 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.652608 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.652637 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.652669 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.652691 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:54Z","lastTransitionTime":"2025-10-03T15:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.685224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.685353 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685520 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685550 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685617 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685619 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685637 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685649 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685915 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:26.685701042 +0000 UTC m=+85.651257685 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.685956 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:26.685937068 +0000 UTC m=+85.651493731 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.755531 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.755651 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.755670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.755694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.755712 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:54Z","lastTransitionTime":"2025-10-03T15:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.826956 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.826996 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.827150 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.827691 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.827803 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:54 crc kubenswrapper[5081]: E1003 15:28:54.828044 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.859724 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.859779 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.859802 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.859831 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.859854 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:54Z","lastTransitionTime":"2025-10-03T15:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.962362 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.962433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.962456 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.962486 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:54 crc kubenswrapper[5081]: I1003 15:28:54.962507 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:54Z","lastTransitionTime":"2025-10-03T15:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.066365 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.066750 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.066943 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.067103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.067267 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.170505 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.170842 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.171038 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.171175 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.171299 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.274064 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.274114 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.274131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.274152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.274168 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.378625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.378700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.378724 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.378753 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.378774 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.481473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.481538 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.481583 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.481625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.481644 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.585394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.585472 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.585491 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.585517 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.585536 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.688973 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.689050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.689071 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.689103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.689124 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.793411 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.793765 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.793847 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.793933 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.794018 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.827112 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:55 crc kubenswrapper[5081]: E1003 15:28:55.827345 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.897027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.897065 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.897075 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.897091 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:55 crc kubenswrapper[5081]: I1003 15:28:55.897102 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:55Z","lastTransitionTime":"2025-10-03T15:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.001031 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.001584 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.001596 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.001618 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.001633 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.105647 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.105696 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.105708 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.105733 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.105758 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.208072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.208128 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.208145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.208168 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.208184 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.310395 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.310457 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.310469 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.310494 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.310507 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.413415 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.413461 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.413471 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.413488 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.413498 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.516095 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.516124 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.516133 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.516149 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.516160 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.619472 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.619525 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.619541 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.619583 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.619630 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.723023 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.723091 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.723111 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.723135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.723157 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826226 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826308 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826325 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826355 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826378 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826655 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826709 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.826739 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:56 crc kubenswrapper[5081]: E1003 15:28:56.826854 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:56 crc kubenswrapper[5081]: E1003 15:28:56.827006 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:56 crc kubenswrapper[5081]: E1003 15:28:56.827198 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.929430 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.929531 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.929589 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.929627 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:56 crc kubenswrapper[5081]: I1003 15:28:56.929648 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:56Z","lastTransitionTime":"2025-10-03T15:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.032520 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.032605 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.032631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.032665 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.032688 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.135947 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.135997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.136020 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.136051 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.136076 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.239549 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.239664 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.239693 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.239745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.239774 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.343092 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.343182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.343233 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.343260 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.343278 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.447635 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.448072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.448263 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.448436 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.448641 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.552910 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.553329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.553486 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.553671 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.553819 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.658135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.658678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.658838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.659037 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.659297 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.762639 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.762700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.762716 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.762741 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.762759 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.826921 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:57 crc kubenswrapper[5081]: E1003 15:28:57.827121 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.865972 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.866049 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.866068 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.866100 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.866118 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.969418 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.969767 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.969836 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.969932 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:57 crc kubenswrapper[5081]: I1003 15:28:57.970004 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:57Z","lastTransitionTime":"2025-10-03T15:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.073315 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.073814 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.073996 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.074215 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.074379 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.177268 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.177340 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.177357 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.177385 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.177407 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.280079 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.280117 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.280128 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.280146 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.280158 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.382422 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.382763 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.382899 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.383072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.383188 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.487034 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.487110 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.487202 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.487244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.487270 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.589383 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.589693 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.589778 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.589910 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.590010 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.692536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.692653 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.692676 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.692700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.692718 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.795609 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.795664 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.795682 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.795705 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.795721 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.827391 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.827436 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.827946 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:28:58 crc kubenswrapper[5081]: E1003 15:28:58.828238 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:28:58 crc kubenswrapper[5081]: E1003 15:28:58.828375 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:28:58 crc kubenswrapper[5081]: E1003 15:28:58.828604 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.898124 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.898171 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.898182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.898198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:58 crc kubenswrapper[5081]: I1003 15:28:58.898210 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:58Z","lastTransitionTime":"2025-10-03T15:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.001316 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.001384 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.001400 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.001427 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.001444 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.104638 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.104709 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.104728 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.104755 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.104776 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.208320 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.208383 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.208399 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.208425 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.208444 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.311740 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.311809 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.311826 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.311852 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.311872 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.414379 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.414419 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.414430 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.414452 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.414466 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.518613 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.518672 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.518682 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.518704 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.518718 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.621771 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.621835 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.621854 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.621881 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.621898 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.724935 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.725011 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.725030 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.725056 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.725074 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.772200 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.772254 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.772272 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.772294 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.772311 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: E1003 15:28:59.792402 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:59Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.797775 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.797809 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.797821 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.797838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.797853 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: E1003 15:28:59.813840 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:59Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.821119 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.821159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.821171 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.821188 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.821200 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.827176 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:28:59 crc kubenswrapper[5081]: E1003 15:28:59.827330 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:28:59 crc kubenswrapper[5081]: E1003 15:28:59.836814 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.847350 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.847389 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.847398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.847416 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.847426 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.865648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.865851 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.865913 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.865977 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.866035 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:59Z is after 2025-08-24T17:21:41Z" Oct 03 15:28:59 crc kubenswrapper[5081]: E1003 15:28:59.882733 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.884521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.884548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.884575 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.884593 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.884606 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.996864 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.997334 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.997540 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.997724 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:28:59 crc kubenswrapper[5081]: I1003 15:28:59.997902 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:28:59Z","lastTransitionTime":"2025-10-03T15:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.101354 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.101427 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.101440 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.101461 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.101496 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.204341 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.204659 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.204743 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.204816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.204886 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.307988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.308052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.308070 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.308095 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.308113 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.412072 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.412140 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.412157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.412186 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.412204 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.515454 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.515521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.515539 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.515598 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.515619 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.618803 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.619164 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.619697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.620050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.620357 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.725925 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.725960 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.725970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.725985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.725998 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.827269 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:00 crc kubenswrapper[5081]: E1003 15:29:00.827490 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.828110 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:00 crc kubenswrapper[5081]: E1003 15:29:00.828180 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.828280 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:00 crc kubenswrapper[5081]: E1003 15:29:00.828354 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.831308 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.831350 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.831363 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.831383 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.831397 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.935136 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.935220 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.935247 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.935279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:00 crc kubenswrapper[5081]: I1003 15:29:00.935305 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:00Z","lastTransitionTime":"2025-10-03T15:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.038048 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.038128 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.038166 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.038196 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.038230 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.141358 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.141433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.141468 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.141491 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.141504 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.245051 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.245089 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.245099 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.245112 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.245122 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.348803 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.348875 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.348898 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.348927 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.348950 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.452675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.452782 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.452816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.452859 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.452887 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.556238 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.556314 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.556333 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.556369 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.556394 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.659959 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.660040 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.660067 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.660100 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.660126 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.762882 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.762950 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.762963 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.762986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.762999 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.827458 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:01 crc kubenswrapper[5081]: E1003 15:29:01.827744 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.851962 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\
":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.867011 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.867146 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.867175 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.867217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.867245 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.879769 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.915586 5081 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://782cce8f618ea86b2e503f6a2fa16daded78e2ba3c6c0daa00cdb5d0be3a49d2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:36Z\\\",\\\"message\\\":\\\"ube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:35Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:28:36.027662 6523 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 
2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.933633 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.955498 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.969184 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.969219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.969231 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.969249 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.969261 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:01Z","lastTransitionTime":"2025-10-03T15:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.972100 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:01 crc kubenswrapper[5081]: I1003 15:29:01.990364 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:01Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.006148 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.020779 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.038004 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.049472 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.064500 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a
7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.071928 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.071990 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.072007 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.072031 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.072048 5081 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.075912 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.085963 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.096901 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.109964 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.125542 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:02Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.175091 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.175152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.175166 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.175190 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.175205 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.279686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.279745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.279761 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.279792 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.279811 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.383023 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.383068 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.383082 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.383100 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.383113 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.486363 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.486417 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.486433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.486457 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.486470 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.589584 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.589622 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.589632 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.589648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.589664 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.696936 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.697019 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.697046 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.697080 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.697105 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.801076 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.801143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.801160 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.801187 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.801205 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.827017 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:02 crc kubenswrapper[5081]: E1003 15:29:02.827215 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.827458 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:02 crc kubenswrapper[5081]: E1003 15:29:02.827537 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.827954 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:02 crc kubenswrapper[5081]: E1003 15:29:02.828009 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.904050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.904109 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.904122 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.904141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:02 crc kubenswrapper[5081]: I1003 15:29:02.904154 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:02Z","lastTransitionTime":"2025-10-03T15:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.007098 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.007150 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.007162 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.007183 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.007196 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.110914 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.110969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.110986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.111008 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.111020 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.214594 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.214649 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.214665 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.214690 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.214707 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.318107 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.318166 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.318190 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.318216 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.318230 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.421214 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.421292 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.421304 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.421326 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.421340 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.525466 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.525545 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.525616 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.525652 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.525676 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.629103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.629139 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.629150 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.629169 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.629180 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.732817 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.732872 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.732885 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.732909 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.732922 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.827042 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:03 crc kubenswrapper[5081]: E1003 15:29:03.827595 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.827693 5081 scope.go:117] "RemoveContainer" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66"
Oct 03 15:29:03 crc kubenswrapper[5081]: E1003 15:29:03.827990 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.835392 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.835427 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.835440 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.835457 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.835470 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.844880 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.864917 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.879926 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.897159 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.916791 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.936383 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.940278 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.940506 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:03 crc 
kubenswrapper[5081]: I1003 15:29:03.940786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.941016 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.941178 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:03Z","lastTransitionTime":"2025-10-03T15:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.961644 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400
dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.975437 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:03 crc kubenswrapper[5081]: I1003 15:29:03.991170 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:03Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.006074 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.023076 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.037397 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.043856 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.046147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.046277 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.046412 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.046538 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.050839 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.063993 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.077173 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.095600 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a
7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.140388 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:04Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.149355 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.149415 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.149429 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.149454 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.149469 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.252271 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.252341 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.252356 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.252386 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.252405 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.355334 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.355395 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.355411 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.355437 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.355455 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.459686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.460115 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.460288 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.460458 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.460679 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.564465 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.564536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.564619 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.564652 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.564671 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.668054 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.668163 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.668183 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.668215 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.668235 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.771734 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.771818 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.771844 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.771878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.771929 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.827292 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.827297 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:04 crc kubenswrapper[5081]: E1003 15:29:04.827914 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:04 crc kubenswrapper[5081]: E1003 15:29:04.827955 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.827347 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:04 crc kubenswrapper[5081]: E1003 15:29:04.828345 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.875186 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.876342 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.876521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.876750 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.876915 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.980275 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.980357 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.980374 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.980405 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:04 crc kubenswrapper[5081]: I1003 15:29:04.980420 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:04Z","lastTransitionTime":"2025-10-03T15:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.083472 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.083546 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.083609 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.083648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.083673 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.186894 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.186965 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.186985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.187012 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.187031 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.290590 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.290658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.290675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.290697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.290711 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.393730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.393832 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.393847 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.393871 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.393886 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.496338 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.496380 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.496393 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.496436 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.496446 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.599597 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.599649 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.599667 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.599695 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.599714 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.702930 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.703405 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.703418 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.703437 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.703450 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.806079 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.806131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.806145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.806164 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.806178 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.827763 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:05 crc kubenswrapper[5081]: E1003 15:29:05.828076 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.910002 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.910056 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.910067 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.910086 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:05 crc kubenswrapper[5081]: I1003 15:29:05.910097 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:05Z","lastTransitionTime":"2025-10-03T15:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.013827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.013891 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.013903 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.013929 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.013945 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.116576 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.116631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.116641 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.116659 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.116672 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.219395 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.219448 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.219462 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.219482 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.219495 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.321639 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.321686 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.321697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.321715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.321728 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.424675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.424718 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.424731 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.424747 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.424760 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.526859 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.526910 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.526928 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.526950 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.526966 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.629794 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.629833 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.629845 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.629861 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.629871 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.732837 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.732875 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.732885 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.732901 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.732912 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.827665 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.827683 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.827699 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:06 crc kubenswrapper[5081]: E1003 15:29:06.827801 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:06 crc kubenswrapper[5081]: E1003 15:29:06.828012 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:06 crc kubenswrapper[5081]: E1003 15:29:06.828179 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.836755 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.836811 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.836827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.836850 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.836867 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.939328 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.939365 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.939378 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.939397 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:06 crc kubenswrapper[5081]: I1003 15:29:06.939411 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:06Z","lastTransitionTime":"2025-10-03T15:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.042233 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.042280 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.042291 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.042308 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.042321 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.144636 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.144715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.144731 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.144747 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.144759 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.247631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.247664 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.247677 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.247692 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.247703 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.351211 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.351281 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.351299 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.351328 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.351351 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.493850 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.493889 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.493896 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.493912 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.493922 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.596012 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.596050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.596059 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.596073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.596082 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.614327 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.614997 5081 scope.go:117] "RemoveContainer" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66" Oct 03 15:29:07 crc kubenswrapper[5081]: E1003 15:29:07.615139 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.698450 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.698494 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.698503 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.698523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.698535 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.801743 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.801848 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.801878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.801920 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.801960 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.826896 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:07 crc kubenswrapper[5081]: E1003 15:29:07.827052 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.904942 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.905018 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.905033 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.905060 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:07 crc kubenswrapper[5081]: I1003 15:29:07.905075 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:07Z","lastTransitionTime":"2025-10-03T15:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.008817 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.008876 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.008888 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.008908 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.008923 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.111670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.111720 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.111729 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.111749 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.111763 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.215026 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.215195 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.215221 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.215247 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.215268 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.317967 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.318009 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.318027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.318042 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.318051 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.421820 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.421895 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.421911 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.421930 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.421944 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.524659 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.524709 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.524723 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.524746 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.524766 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.627052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.627124 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.627138 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.627156 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.627167 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.729883 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.729933 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.729947 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.729969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.729985 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.827486 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.827498 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:08 crc kubenswrapper[5081]: E1003 15:29:08.827757 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:08 crc kubenswrapper[5081]: E1003 15:29:08.827855 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.827515 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:08 crc kubenswrapper[5081]: E1003 15:29:08.828151 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.832859 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.832945 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.832964 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.832993 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.833017 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.937326 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.937384 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.937409 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.937429 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:08 crc kubenswrapper[5081]: I1003 15:29:08.937447 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:08Z","lastTransitionTime":"2025-10-03T15:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.040894 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.040956 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.040975 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.041001 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.041022 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.144641 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.144716 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.144728 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.144772 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.144794 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.247944 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.248017 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.248032 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.248062 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.248079 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.350540 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.350634 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.350652 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.350676 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.350694 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.454697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.454764 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.454786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.454817 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.454842 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.558017 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.558051 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.558064 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.558083 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.558095 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.662109 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.662601 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.662785 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.662992 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.663142 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.766102 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.766164 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.766176 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.766200 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.766211 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.827405 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:09 crc kubenswrapper[5081]: E1003 15:29:09.827642 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.869100 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.869616 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.869783 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.869951 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.870080 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.972983 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.973046 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.973067 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.973084 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:09 crc kubenswrapper[5081]: I1003 15:29:09.973093 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:09Z","lastTransitionTime":"2025-10-03T15:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.075646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.075732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.075815 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.075853 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.075875 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.080792 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.080840 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.080851 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.080869 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.080884 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.102834 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:10Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.108603 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.108883 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.108988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.109071 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.109149 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.125422 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:10Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.130259 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.130398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.130491 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.130603 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.130713 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.147234 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:10Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.151048 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.151116 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.151132 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.151154 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.151170 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.167777 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:10Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.172027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.172061 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.172073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.172096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.172113 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.186840 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:10Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.186989 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.192293 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.192411 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.192432 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.192482 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.192499 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.294365 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.294424 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.294442 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.294466 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.294484 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.398482 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.398533 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.398544 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.398581 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.398593 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.500959 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.501435 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.501536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.502029 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.502401 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.606435 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.606488 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.606506 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.606530 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.606545 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.680747 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.680983 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.681127 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:29:42.681094787 +0000 UTC m=+101.646651590 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.709877 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.709970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.710016 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.710043 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.710087 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.812838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.812894 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.812907 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.812926 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.812938 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.826774 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.826842 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.826922 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.826874 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.827140 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:10 crc kubenswrapper[5081]: E1003 15:29:10.827244 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.916969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.917037 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.917052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.917076 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:10 crc kubenswrapper[5081]: I1003 15:29:10.917095 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:10Z","lastTransitionTime":"2025-10-03T15:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.020430 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.020511 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.020535 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.020600 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.020625 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.123997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.124057 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.124071 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.124088 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.124099 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.227140 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.227204 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.227217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.227242 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.227258 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.330648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.330715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.330730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.330752 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.330765 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.433800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.433858 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.433870 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.433892 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.433906 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.537629 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.538096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.538276 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.538435 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.538596 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.642189 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.642263 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.642278 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.642298 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.642311 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.745458 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.745523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.745537 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.745554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.745586 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.827743 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:11 crc kubenswrapper[5081]: E1003 15:29:11.827935 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848039 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848153 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848176 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.848911 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.867203 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.881092 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.894780 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.913025 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.937500 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 
2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.950157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.950206 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.950220 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.950238 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.950250 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:11Z","lastTransitionTime":"2025-10-03T15:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.957236 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.970868 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.984498 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:11 crc kubenswrapper[5081]: I1003 15:29:11.999686 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:11Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.013341 5081 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6
ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.028800 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.043685 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.053461 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.053515 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.053528 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.053548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.053580 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.056728 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.070326 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.085807 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.102143 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.156816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.156869 5081 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.156880 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.156896 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.156912 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.259838 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.259917 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.259938 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.259969 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.259988 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.280624 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/0.log" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.280687 5081 generic.go:334] "Generic (PLEG): container finished" podID="af6b6616-1e4c-4618-890b-7eb334b8c339" containerID="c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40" exitCode=1 Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.280724 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerDied","Data":"c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.281164 5081 scope.go:117] "RemoveContainer" containerID="c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.304076 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.320606 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.338866 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.355170 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.362609 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.362635 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.362646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.362662 5081 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.362673 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.373001 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.386751 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.408979 5081 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a
7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.420545 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.433572 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.448991 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465203 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465244 5081 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465276 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465292 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.465746 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.484707 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.499279 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.516637 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e96
9c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.537240 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 
2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.548990 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.563215 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:12Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.567985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.568206 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.568318 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.568432 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.568529 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.671110 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.671153 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.671164 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.671180 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.671193 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.773729 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.773814 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.773832 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.773852 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.773868 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.827327 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.827351 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:12 crc kubenswrapper[5081]: E1003 15:29:12.827491 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:12 crc kubenswrapper[5081]: E1003 15:29:12.827524 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.827981 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:12 crc kubenswrapper[5081]: E1003 15:29:12.828371 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.876329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.876381 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.876394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.876413 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.876425 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.979329 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.979675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.979701 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.979719 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:12 crc kubenswrapper[5081]: I1003 15:29:12.979731 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:12Z","lastTransitionTime":"2025-10-03T15:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.082051 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.082104 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.082116 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.082135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.082145 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.184651 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.184710 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.184723 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.184748 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.184761 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.284987 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/0.log" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.285071 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerStarted","Data":"b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.286507 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.286595 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.286610 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.286631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.286650 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.299042 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.312779 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.326322 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.339741 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.354378 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.369719 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.388122 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.389666 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.389722 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.389740 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.389767 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.389786 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.404055 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.417946 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.435494 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.451288 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.463606 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.479892 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.492578 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.492631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.492646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.492666 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.492682 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.495854 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.512835 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.531335 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.543915 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:13Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.595737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.595789 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.595799 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.595818 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.595831 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.698651 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.698811 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.698893 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.698924 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.698950 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.802593 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.802635 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.802645 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.802662 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.802675 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.827262 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:13 crc kubenswrapper[5081]: E1003 15:29:13.827410 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.905760 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.905840 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.905857 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.905878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:13 crc kubenswrapper[5081]: I1003 15:29:13.905901 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:13Z","lastTransitionTime":"2025-10-03T15:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.007888 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.007947 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.007989 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.008009 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.008026 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.111545 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.111653 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.111676 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.111705 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.111727 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.215145 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.215214 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.215233 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.215259 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.215276 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.318089 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.318160 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.318183 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.318210 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.318232 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.420946 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.421009 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.421020 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.421039 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.421053 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.523883 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.523940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.523953 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.523973 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.523991 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.627403 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.627460 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.627472 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.627495 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.627508 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.729835 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.729881 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.729890 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.729906 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.729920 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.826678 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.826770 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:14 crc kubenswrapper[5081]: E1003 15:29:14.826877 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.826772 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:14 crc kubenswrapper[5081]: E1003 15:29:14.826991 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:14 crc kubenswrapper[5081]: E1003 15:29:14.827103 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.832850 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.832901 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.832917 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.832941 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.832958 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.935685 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.935728 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.935737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.935754 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:14 crc kubenswrapper[5081]: I1003 15:29:14.935765 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:14Z","lastTransitionTime":"2025-10-03T15:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.038074 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.038140 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.038157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.038183 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.038201 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.141013 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.141088 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.141101 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.141121 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.141134 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.244481 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.244551 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.244600 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.244631 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.244653 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.348661 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.348732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.348751 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.348779 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.348799 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.451670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.451734 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.451751 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.451779 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.451800 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.554941 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.554985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.554999 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.555016 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.555031 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.657132 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.657200 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.657217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.657242 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.657260 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.760896 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.760976 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.760997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.761021 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.761038 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.827511 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:15 crc kubenswrapper[5081]: E1003 15:29:15.827716 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.863590 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.863642 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.863656 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.863674 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.863686 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.966061 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.966112 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.966125 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.966143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:15 crc kubenswrapper[5081]: I1003 15:29:15.966155 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:15Z","lastTransitionTime":"2025-10-03T15:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.069867 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.069997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.070017 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.070044 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.070067 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.176718 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.176759 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.176770 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.176787 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.176800 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.279481 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.279536 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.279548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.279595 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.279609 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.382612 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.382781 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.382809 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.382839 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.382858 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.486207 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.486270 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.486287 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.486312 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.486333 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.590228 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.590279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.590291 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.590309 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.590322 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.693379 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.693907 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.693922 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.693944 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.693958 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.798054 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.798141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.798187 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.798217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.798235 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.827027 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.827160 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:16 crc kubenswrapper[5081]: E1003 15:29:16.827207 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.827027 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:16 crc kubenswrapper[5081]: E1003 15:29:16.828228 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:16 crc kubenswrapper[5081]: E1003 15:29:16.828642 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.901532 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.901617 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.901637 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.901660 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:16 crc kubenswrapper[5081]: I1003 15:29:16.901678 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:16Z","lastTransitionTime":"2025-10-03T15:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.004945 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.005023 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.005043 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.005073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.005091 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.108366 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.108440 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.108461 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.108489 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.108513 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.211796 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.211930 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.211956 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.211981 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.211999 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.315621 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.315668 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.315681 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.315700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.315716 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.419104 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.419163 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.419182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.419206 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.419225 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.522804 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.522904 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.522927 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.522965 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.522989 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.625576 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.625614 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.625628 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.625646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.625658 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:17Z","lastTransitionTime":"2025-10-03T15:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 15:29:17 crc kubenswrapper[5081]: I1003 15:29:17.827977 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:17 crc kubenswrapper[5081]: E1003 15:29:17.828243 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
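Because the same skip message recurs for several pods, a small triage script can summarize which workloads are blocked. The helper below is hypothetical (not part of OpenShift tooling): it reads a kubelet log on stdin and tallies "Error syncing pod, skipping" records per pod. Run it as: go run podsyncerrs.go < kubelet.log

// podsyncerrs.go - illustrative triage helper (hypothetical). Counts
// "Error syncing pod, skipping" records per pod in a kubelet log such as
// this one, so the affected workloads can be listed without reading the
// raw stream.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	podRE := regexp.MustCompile(`"Error syncing pod, skipping".*pod="([^"]+)"`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // log lines can be very long
	for sc.Scan() {
		if m := podRE.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for pod, n := range counts {
		fmt.Printf("%6d  %s\n", n, pod)
	}
}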
Oct 03 15:29:18 crc kubenswrapper[5081]: I1003 15:29:18.826828 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:18 crc kubenswrapper[5081]: I1003 15:29:18.826907 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:18 crc kubenswrapper[5081]: I1003 15:29:18.826957 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:18 crc kubenswrapper[5081]: E1003 15:29:18.827024 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:29:18 crc kubenswrapper[5081]: E1003 15:29:18.827195 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:18 crc kubenswrapper[5081]: E1003 15:29:18.827423 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
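Each setters.go:603 record in this log serializes the node's Ready condition as inline JSON. Below is a minimal sketch of how such a condition object is shaped and marshalled; the struct mirrors the fields visible in the log (it is modeled on Kubernetes' NodeCondition, but only the standard library is used, and the message text is taken from the log itself).

// notready.go - minimal sketch of the "Node became not ready" condition
// shape seen in the setters.go:603 records. Field names and message are
// copied from the log; this is an illustration, not kubelet code.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	cond := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, _ := json.Marshal(cond)
	fmt.Printf("condition=%s\n", b) // matches the condition={...} form in the log
}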
Oct 03 15:29:19 crc kubenswrapper[5081]: I1003 15:29:19.827738 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:19 crc kubenswrapper[5081]: E1003 15:29:19.827936 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
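The status-patch failure that follows is not a networking problem at all: the node.network-node-identity.openshift.io webhook rejects the TLS handshake because its serving certificate expired on 2025-08-24, while the node's clock reads 2025-10-03. The standalone sketch below reproduces that validity verdict for any PEM certificate; the file path is a command-line placeholder, and this illustrates the x509 check reported in the log, not the webhook's code.

// certcheck.go - standalone sketch reproducing the x509 failure in the
// node-status patch error below: an expired serving certificate makes
// every TLS handshake fail with "certificate has expired or is not yet
// valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: certcheck <cert.pem>")
		os.Exit(2)
	}
	data, err := os.ReadFile(os.Args[1]) // e.g. the webhook's serving cert
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	if now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
		fmt.Printf("x509: certificate has expired or is not yet valid: "+
			"current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
}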
Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.324445 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.324501 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.324519 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.324539 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.324586 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.339246 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:20Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.348157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.348364 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.348404 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.348484 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.348504 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.368689 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:20Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.374416 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.374479 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.374496 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.374523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.374542 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.395470 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:20Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.401968 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.402036 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.402050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.402069 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.402081 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.427754 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:20Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.434045 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.434126 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.434141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.434198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.434213 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.448365 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:20Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.448613 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.451029 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.451137 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.451155 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.451182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.451203 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.553905 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.553942 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.553952 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.553970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.553981 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.657813 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.657863 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.657889 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.657915 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.657933 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.761671 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.761726 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.761739 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.761758 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.761771 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.827601 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.827612 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.827803 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.828181 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.828340 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.828481 5081 scope.go:117] "RemoveContainer" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66" Oct 03 15:29:20 crc kubenswrapper[5081]: E1003 15:29:20.828595 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.864486 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.864597 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.864614 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.864667 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.864687 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.968997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.969049 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.969063 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.969089 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:20 crc kubenswrapper[5081]: I1003 15:29:20.969105 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:20Z","lastTransitionTime":"2025-10-03T15:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.072548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.072625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.072642 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.072663 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.072679 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.178141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.178208 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.178229 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.178256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.178276 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.281369 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.281414 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.281426 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.281448 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.281467 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.321774 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/2.log" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.325882 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.326405 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.343770 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.357423 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.374315 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z"
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.384403 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.384678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.384746 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.384809 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.384899 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.397928 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.420961 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 
2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.435803 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.451968 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.466612 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.482101 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.488183 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.488326 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.488420 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.488516 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.488620 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.497392 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.514856 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.532901 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.546416 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.560798 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.573144 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.587271 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.591500 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.591535 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.591548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.591585 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.591597 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.604073 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.699810 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.699888 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.699912 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.699940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.699955 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.803141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.803200 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.803219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.803244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.803261 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.826875 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:21 crc kubenswrapper[5081]: E1003 15:29:21.827036 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.843778 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.862941 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.877097 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.893054 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.906615 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.906688 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.906701 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.906717 5081 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.906730 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:21Z","lastTransitionTime":"2025-10-03T15:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.911956 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f78
14a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.927234 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.943404 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.963943 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.978365 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:21 crc kubenswrapper[5081]: I1003 15:29:21.988827 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.000490 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.009893 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.009953 5081 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.009967 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.009994 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.010008 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.013966 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.029284 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.045382 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.064738 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.092531 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 
2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.106217 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.113509 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.113596 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.113617 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.113645 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.113669 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.216492 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.216606 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.216626 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.216658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.216678 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.320101 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.320196 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.320214 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.320239 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.320259 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.331300 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.332244 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/2.log" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.336043 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" exitCode=1 Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.336113 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.336186 5081 scope.go:117] "RemoveContainer" containerID="ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.343396 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:29:22 crc kubenswrapper[5081]: E1003 15:29:22.343905 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.378029 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3
e06d2cbbebf3cf4cc84c7b3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff59892ac0883433c9f94ae52fea631b826a1400dfb3fad923a1b09c2d1e6f66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:28:52Z\\\",\\\"message\\\":\\\"k8s.ovn.org/owner:openshift-ingress-canary/ingress-canary]} name:Service_openshift-ingress-canary/ingress-canary_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.34:8443: 10.217.5.34:8888:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7715118b-bb1b-400a-803e-7ab2cc3eeec0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 15:28:52.717985 6753 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:28:52Z is after 2025-08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:21Z\\\",\\\"message\\\":\\\"failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:29:21.710212 7110 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-7fljw in node crc\\\\nI1003 15:29:21.710179 7110 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-7l6c6\\\\nI1003 15:29:21.710222 7110 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-7fljw after 0 
failed attempt(s)\\\\nI1003 15:29:21.710232 7110 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-7fljw\\\\nI1003 15:29:21.710239 7110 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-7l6c6 in\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:29:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\"
:\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.391618 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.407116 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.422717 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.422757 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.422768 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.422786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.422799 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.427679 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.448906 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.472202 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.492667 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.508735 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.526666 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.526712 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.526722 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.526740 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.526753 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.527320 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.545088 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.561380 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.581501 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.599276 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.615446 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.629935 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.630366 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.630443 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.630475 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.630496 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.635110 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.652615 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.670032 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:22Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.734043 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.734109 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.734126 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.734152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.734169 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.826621 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.826698 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:22 crc kubenswrapper[5081]: E1003 15:29:22.826877 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.826903 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:22 crc kubenswrapper[5081]: E1003 15:29:22.827072 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:22 crc kubenswrapper[5081]: E1003 15:29:22.827193 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.838310 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.838368 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.838393 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.838422 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.838444 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.981881 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.981997 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.982052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.982084 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:22 crc kubenswrapper[5081]: I1003 15:29:22.982105 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:22Z","lastTransitionTime":"2025-10-03T15:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.085118 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.085203 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.085219 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.085243 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.085260 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.187671 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.187720 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.187736 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.187753 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.187765 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.290673 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.290741 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.290760 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.290788 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.290811 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.343289 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.351309 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:29:23 crc kubenswrapper[5081]: E1003 15:29:23.351657 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.373285 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.393912 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.393990 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.394042 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.394069 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.394090 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.396120 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.422456 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.440134 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.465807 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.496408 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:21Z\\\",\\\"message\\\":\\\"failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:29:21.710212 7110 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-7fljw in node crc\\\\nI1003 15:29:21.710179 7110 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-7l6c6\\\\nI1003 15:29:21.710222 7110 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-7fljw after 0 failed attempt(s)\\\\nI1003 15:29:21.710232 7110 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-7fljw\\\\nI1003 15:29:21.710239 7110 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-7l6c6 
in\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:29:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.497057 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.497111 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.497133 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.497163 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.497187 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.515461 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.538697 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.559132 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.580858 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.595947 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.600733 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.600788 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.600806 5081 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.600830 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.600848 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.616737 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.638334 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.659662 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.684100 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluste
r-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 
1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.704052 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 
crc kubenswrapper[5081]: I1003 15:29:23.704135 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.704159 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.704188 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.704209 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.707253 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.730023 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:23Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.806760 5081 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.806817 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.806826 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.806844 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.806854 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.827209 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:23 crc kubenswrapper[5081]: E1003 15:29:23.827374 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.909593 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.909658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.909675 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.909700 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:23 crc kubenswrapper[5081]: I1003 15:29:23.909717 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:23Z","lastTransitionTime":"2025-10-03T15:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.012963 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.013011 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.013019 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.013036 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.013047 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:24Z","lastTransitionTime":"2025-10-03T15:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.827052 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.827445 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:24 crc kubenswrapper[5081]: E1003 15:29:24.827749 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:29:24 crc kubenswrapper[5081]: E1003 15:29:24.828224 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.827795 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:24 crc kubenswrapper[5081]: E1003 15:29:24.828434 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:29:24 crc kubenswrapper[5081]: I1003 15:29:24.871288 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.056508 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.056633 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.056656 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.056688 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.056709 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:25Z","lastTransitionTime":"2025-10-03T15:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:25 crc kubenswrapper[5081]: I1003 15:29:25.826888 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:25 crc kubenswrapper[5081]: E1003 15:29:25.827145 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.094152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.094231 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.094245 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.094266 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.094277 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:26Z","lastTransitionTime":"2025-10-03T15:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.661087 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.661254 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.661377 5081 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.661389 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.661343466 +0000 UTC m=+149.626900109 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.661475 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.661451149 +0000 UTC m=+149.627007802 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.661545 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.661766 5081 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.661831 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.66181582 +0000 UTC m=+149.627372473 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.762183 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.762264 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762410 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762448 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762469 5081 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762479 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762521 5081 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762543 5081 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762589 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.762539498 +0000 UTC m=+149.728096141 (durationBeforeRetry 1m4s).
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.762655 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.762634551 +0000 UTC m=+149.728191204 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.815500 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.815554 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.815598 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.815625 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.815650 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:26Z","lastTransitionTime":"2025-10-03T15:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.826880 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.826964 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.826974 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.827058 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.827246 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:26 crc kubenswrapper[5081]: E1003 15:29:26.827339 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.919238 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.919316 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.919341 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.919367 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:26 crc kubenswrapper[5081]: I1003 15:29:26.919385 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:26Z","lastTransitionTime":"2025-10-03T15:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.023547 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.023687 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.023712 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.023746 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.023775 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:27Z","lastTransitionTime":"2025-10-03T15:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 15:29:27 crc kubenswrapper[5081]: I1003 15:29:27.827098 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:27 crc kubenswrapper[5081]: E1003 15:29:27.827472 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
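[editor's note] Every failure in this section has the same root cause: nothing has yet written a CNI config into /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and no pod sandbox can be created. For illustration only, the sketch below writes the kind of minimal config whose absence the kubelet is reporting; the filename, network name, and the bridge/host-local plugin choice are hypothetical, and on this cluster the real file is produced by the network operator once it starts, not by hand.

package main

import (
	"log"
	"os"
)

// Hypothetical example of a minimal CNI config file. "10-example.conf",
// "example-net", and the bridge plugin are assumptions chosen only to
// show the expected shape of such a file.
const conf = `{
  "cniVersion": "0.4.0",
  "name": "example-net",
  "type": "bridge",
  "bridge": "cni0",
  "isGateway": true,
  "ipMasq": true,
  "ipam": {
    "type": "host-local",
    "subnet": "10.88.0.0/16"
  }
}`

func main() {
	// The directory named in the log entries above.
	path := "/etc/kubernetes/cni/net.d/10-example.conf"
	if err := os.WriteFile(path, []byte(conf), 0o644); err != nil {
		log.Fatal(err)
	}
}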
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.826798 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.826829 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.827018 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.827226 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.828296 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.828447 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.838094 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.838171 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.838191 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.838224 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.838245 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.854552 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7d41c104-ec72-4a46-8cc3-ea2023941cda\\\",\\\"systemUUID\\\":\\\"57f4f478-2d02-4730-ae6e-811ee98398a9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:30Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.860102 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.860157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.860172 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.860191 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.860207 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.873232 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... status patch payload identical to the preceding attempt; omitted ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:30Z is after 2025-08-24T17:21:41Z"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.878068 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.878117 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.878131 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.878153 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.878167 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.891500 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... status patch payload identical to the preceding attempt; omitted ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:30Z is after 2025-08-24T17:21:41Z"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.896322 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.896417 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.896440 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.896462 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.896504 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.910139 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... status patch payload identical to the preceding attempt; omitted ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:30Z is after 2025-08-24T17:21:41Z"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.915303 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.915409 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.915431 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.915465 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.915485 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.928168 5081 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... status patch payload identical to the preceding attempt; omitted ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:30Z is after 2025-08-24T17:21:41Z"
Oct 03 15:29:30 crc kubenswrapper[5081]: E1003 15:29:30.928408 5081 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.930147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
event="NodeHasSufficientMemory" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.930177 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.930209 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.930244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:30 crc kubenswrapper[5081]: I1003 15:29:30.930256 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:30Z","lastTransitionTime":"2025-10-03T15:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.033398 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.033439 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.033454 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.033473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.033486 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.136679 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.136732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.136744 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.136762 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.136773 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.239465 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.239519 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.239530 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.239553 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.239590 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.342799 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.342852 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.342864 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.342887 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.342899 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.445955 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.446008 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.446027 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.446054 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.446071 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.549752 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.549792 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.549804 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.549820 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.549831 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.652458 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.652521 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.652534 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.652575 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.652589 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.756387 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.756485 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.756523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.756589 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.756618 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.826766 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:31 crc kubenswrapper[5081]: E1003 15:29:31.827253 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.845908 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8b383b3-0176-4c0a-9129-3ed5cf3c315b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aa99747a6aa279c3920fb88e984a1ab7b98faad74f175c7f7d3e14ea0cbafe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ae7ad5274d47915e62eff4e7f037a517875ec91ae493215047d90f7b2500f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ae7ad5274d47915e62eff4e7f037a517875ec91ae493215047d90f7b2500f9a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.864207 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.864304 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.864331 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.864360 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.864390 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.866294 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.884252 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.899655 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb604aa8f299c43d96495e78d5a48f0c29cb92136517d8d22ca378dabfe43cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b43d2bcc23b5278053f21734b3e26f4b1f45e987b99120606f5b1f0e82dc328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.916649 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc7492d68a8f7d718c58efb30e9128ae7b9f224fd06d604a31d764f5f1506d1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.934290 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b80cb7-1043-4bad-98eb-7c039b7c0a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3268e4fa1639027a9a0dc56ac353d329f1668cca4cb0a0e2a20835f3ed81d03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf1f3539c6970e9635cba66380ecb651f39aa4b50ebbe982a1dcb75c94dcffe7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81873fdc929b3f505ecf9cca48a9ad56fbeda2883f28a65624d8001f0f344826\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8338fc05b0459732cfd32db1a3efbbd6ac3ed9c7e1a7695bb04514240c28dcf7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c3beb60d498ef4a418db5dc098a4b00d9843e4e8685526ca25958dbb19b69161\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1003 15:28:15.575493 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 15:28:15.577963 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1642592073/tls.crt::/tmp/serving-cert-1642592073/tls.key\\\\\\\"\\\\nI1003 15:28:22.248042 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 15:28:22.251007 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 15:28:22.251031 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 15:28:22.251049 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 15:28:22.251055 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 15:28:22.260522 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1003 15:28:22.260600 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 15:28:22.260614 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 15:28:22.260619 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 15:28:22.260623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 15:28:22.260627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1003 15:28:22.260537 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1003 15:28:22.263219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12db32a1d54e219edcd64bd7041bae9209b7347c1739dd43034be1cc2153902f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e0ecd1436b28abbb93831b7ce0188f8555b562afa5df3e8fb5d81d65a74b72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.950637 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"465f2bfe-2cad-4db3-92fe-7bb196838cb9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a48ae91a6a7f380c41d03a5af805ec7cb4a1c2a7bf93b2dccb2aaf8a94550d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://31462e0ca4978732ea2f5c10ef173b96bd6bc60c48d83d1952e05c43942dc60d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d507085b0ae8f4f7b53d537e1ab49d6534954d6c17c068da4c3f15ddf42bb87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a537bd695a3f20528a1b59feba300577f13c8278311ab53feda7882dc70ad6ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.965745 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc0e93f4-3228-4f47-8edf-4d12bf3baddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6ecfdbf40d1444a8ec31b99ee6978cb84de737388258d3a3365fee9144c42cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bj8tk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-lkz79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.967385 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.967434 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.967471 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.967528 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.967637 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:31Z","lastTransitionTime":"2025-10-03T15:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.978593 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a04712b2-e5d7-412d-83d5-f27feff1e899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5cb1ca2548e0cd7145f8304bbcd4820c9e17b5201144d1e1912a02b7d4e0079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://462d536f00a9d495ddee21f425a418d134887772293efeaef9085893c23f73e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a54d90e6353184818513ee42bc634e9c8192bba1c2854324ae535dca60d2bd76\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3804268e4a029c4997aa0ee46ebde0dee83706c4c9711dfb30d11fdfebea88f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:01Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:31 crc kubenswrapper[5081]: I1003 15:29:31.990056 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7l6c6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edef5639-bdcb-4f74-b2dc-d4bb64e24d85\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0384cd606a7defaaa04dcf9f7e90751c0d472be65ae0cfaa01ac707983e3b1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p5kss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7l6c6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.000429 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1030e4-8512-4273-9a04-2cc7d89903ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308f8e3b0cb8ea68d69b4d0c3abce54f4c0b42a2f70cf708a56a9bf2d93a9c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://832fdfd29baea484b2c7f9488473fdbe0778df0266d6f9e155ab2f16295931c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6dp62\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76b6m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:31Z is after 2025-08-24T17:21:41Z" Oct 03 
15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.013300 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zdszj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"660149c8-a5c7-4581-abae-89611dafa042\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l78p5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zdszj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.029238 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0128f75170075ea830722aa274f4f58bb7c22e683a507ed5bf5e6780ebf1c9f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.043340 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.057544 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-7fljw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6b6616-1e4c-4618-890b-7eb334b8c339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:29:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:12Z\\\",\\\"message\\\":\\\"2025-10-03T15:28:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873\\\\n2025-10-03T15:28:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ca4970ea-cb21-45a4-a347-be431aa69873 to /host/opt/cni/bin/\\\\n2025-10-03T15:28:27Z [verbose] multus-daemon started\\\\n2025-10-03T15:28:27Z [verbose] Readiness Indicator file check\\\\n2025-10-03T15:29:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:29:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-796d5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-7fljw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.069984 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.070288 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.070444 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.070629 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.070788 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.073327 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sz44f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e312ca3c-e5c4-49de-a60e-a5d6bcbbcec6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f9756a00b015645638bef26fa3a556333d6f3f4e5a459b0dd56b4562f126e15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4baf5a9dd4b91f9c26ec13ef81ff033d45f8bea131a2b0ced57a515e6cfe400\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a502c46a0f29e4a6045c3d2bea87ad8f167823e04fef9a6b4f20b8ed4698377\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a7280eecaf1763e759e2e52c74139e102b306ffeb162153014587461579ff01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e969c75205248d5d499a384807053ca633c6370469383e9952352130e8af493\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d263c8b08c3156cbd6b7be245a2a22896db192cb98f4fb1fe5701d88d6d2428b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fac79c6bec0505627b1b30f54ca1b39cccd8b30d3618904a0c44043fd9a81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nbmn5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:24Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sz44f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.097118 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63642c7-8d80-4615-94d9-91d4c41421cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T15:29:21Z\\\",\\\"message\\\":\\\"failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 15:29:21.710212 7110 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-7fljw in node crc\\\\nI1003 15:29:21.710179 7110 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-7l6c6\\\\nI1003 15:29:21.710222 7110 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-7fljw after 0 failed attempt(s)\\\\nI1003 15:29:21.710232 7110 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-7fljw\\\\nI1003 15:29:21.710239 7110 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-7l6c6 in\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T15:29:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T15:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T15:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rphc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5bxx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.112284 5081 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wg679" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4efa7d08-97eb-4655-8ee6-be870ebcd421\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T15:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52edc4d118b21db2ae0ac16109d0f172931c9e58cb4f752451876acbf56b5f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T15:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dnpmt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T15:28:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wg679\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T15:29:32Z is after 2025-08-24T17:21:41Z" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.174654 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.174704 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.174718 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.174737 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.174750 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.277892 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.277970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.277986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.278011 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.278028 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.381023 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.381312 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.381394 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.381482 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.381549 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.484998 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.485100 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.485123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.485157 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.485183 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.589103 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.589143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.589198 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.589217 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.589231 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.692258 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.692307 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.692317 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.692339 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.692359 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.795449 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.795501 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.795511 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.795530 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.795547 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.827163 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.827191 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.827312 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:32 crc kubenswrapper[5081]: E1003 15:29:32.827498 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:32 crc kubenswrapper[5081]: E1003 15:29:32.827778 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:32 crc kubenswrapper[5081]: E1003 15:29:32.827659 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.898726 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.898792 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.898811 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.898860 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:32 crc kubenswrapper[5081]: I1003 15:29:32.898881 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:32Z","lastTransitionTime":"2025-10-03T15:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.001970 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.002051 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.002077 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.002110 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.002136 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.104701 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.104752 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.104765 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.104791 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.104802 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.208321 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.208382 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.208396 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.208415 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.208428 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.311418 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.311486 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.311502 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.311522 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.311536 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.413795 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.413847 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.413860 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.413875 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.413889 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.517450 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.517515 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.517532 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.517589 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.517609 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.620730 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.620786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.620800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.620821 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.620836 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.724175 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.724231 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.724248 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.724272 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.724287 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.826940 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.827118 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.827170 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.827186 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.827202 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.827218 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:33 crc kubenswrapper[5081]: E1003 15:29:33.827476 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.848117 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.931123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.931225 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.931244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.931270 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:33 crc kubenswrapper[5081]: I1003 15:29:33.931289 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:33Z","lastTransitionTime":"2025-10-03T15:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.034680 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.034731 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.034746 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.034767 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.034781 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.139239 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.139315 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.139338 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.139379 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.139402 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.242497 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.242594 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.242615 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.242645 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.242664 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.346654 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.346801 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.346821 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.346849 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.346904 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.450520 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.450640 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.450706 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.450743 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.450803 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.554769 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.554853 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.554880 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.554907 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.554930 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.658886 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.658961 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.658985 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.659018 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.659042 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.762068 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.762118 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.762129 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.762147 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.762159 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.827596 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.827786 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.827819 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:34 crc kubenswrapper[5081]: E1003 15:29:34.827778 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:29:34 crc kubenswrapper[5081]: E1003 15:29:34.827894 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:34 crc kubenswrapper[5081]: E1003 15:29:34.828059 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.865334 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.865385 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.865399 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.865433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.865458 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.969223 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.969279 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.969297 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.969320 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:34 crc kubenswrapper[5081]: I1003 15:29:34.969334 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:34Z","lastTransitionTime":"2025-10-03T15:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.072728 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.072792 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.072812 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.072843 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.072864 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.176484 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.176548 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.176592 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.176619 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.176638 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.283263 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.283372 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.283395 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.283477 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.283535 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.386493 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.386606 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.386634 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.386665 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.386691 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.489285 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.489439 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.489476 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.489508 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.489533 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.593086 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.593146 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.593169 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.593197 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.593221 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.696225 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.696786 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.696808 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.696835 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.696857 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.800343 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.800419 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.800438 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.800468 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.800485 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.827148 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:35 crc kubenswrapper[5081]: E1003 15:29:35.827320 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.904182 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.904241 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.904262 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.904292 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:35 crc kubenswrapper[5081]: I1003 15:29:35.904316 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:35Z","lastTransitionTime":"2025-10-03T15:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.007550 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.007622 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.007638 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.007661 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.007678 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.111073 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.111143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.111160 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.111185 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.111204 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.215424 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.215512 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.215535 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.215605 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.215632 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.319827 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.319878 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.319891 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.319909 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.319920 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.422914 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.422988 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.423005 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.423031 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.423051 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.527000 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.527063 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.527089 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.527123 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.527146 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.630372 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.630446 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.630471 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.630499 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.630538 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.734404 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.734473 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.734492 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.734516 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.734533 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.826802 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.826866 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.826806 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:29:36 crc kubenswrapper[5081]: E1003 15:29:36.827014 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 15:29:36 crc kubenswrapper[5081]: E1003 15:29:36.827099 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042"
Oct 03 15:29:36 crc kubenswrapper[5081]: E1003 15:29:36.827193 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.837150 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.837215 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.837273 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.837302 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.837328 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.940592 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.940665 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.940683 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.940709 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:36 crc kubenswrapper[5081]: I1003 15:29:36.940728 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:36Z","lastTransitionTime":"2025-10-03T15:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.043715 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.043762 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.043778 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.043801 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.043817 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.147804 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.147873 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.147898 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.147933 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.147958 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.251141 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.251203 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.251220 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.251244 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.251264 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.354429 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.354495 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.354514 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.354537 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.354556 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.456612 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.456677 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.456694 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.456719 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.456737 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.559855 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.559931 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.559956 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.559986 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.560003 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.663043 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.663087 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.663099 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.663116 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.663129 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.766290 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.766346 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.766364 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.766388 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.766405 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.827382 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 15:29:37 crc kubenswrapper[5081]: E1003 15:29:37.827629 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.828304 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"
Oct 03 15:29:37 crc kubenswrapper[5081]: E1003 15:29:37.828476 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.868509 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.868599 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.868618 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.868646 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.868683 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.972204 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.972272 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.972290 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.972317 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 15:29:37 crc kubenswrapper[5081]: I1003 15:29:37.972335 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:37Z","lastTransitionTime":"2025-10-03T15:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.075432 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.075492 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.075503 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.075523 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.075536 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.179097 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.179168 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.179184 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.179210 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.179228 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.282256 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.282328 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.282343 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.282366 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.282382 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.385528 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.385610 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.385627 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.385648 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.385661 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.488447 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.488513 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.488538 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.488601 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.488629 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.591619 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.591702 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.591724 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.591749 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.591767 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.694621 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.694669 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.694684 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.694706 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.694721 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.798226 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.798273 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.798288 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.798312 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.798327 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.827165 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.827326 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.827412 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:38 crc kubenswrapper[5081]: E1003 15:29:38.827550 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
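The NodeNotReady records above repeat every hundred milliseconds or so because the kubelet re-evaluates readiness on each status sync, and every pass fails the same runtime check: the container runtime reports NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/ (on this node that file is written by the ovnkube-node pod, which is itself in CrashLoopBackOff earlier in the log). A minimal sketch of that directory check follows, assuming only that a configuration is any *.conf, *.conflist, or *.json file; the helper name is mine, and the authoritative scan is done by the runtime through libcni, not by code like this.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file. It mirrors, in simplified form, the condition the
// kubelet is waiting on in the "no CNI configuration file" records above.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	fmt.Println("NetworkReady would be:", ok)
}

Once ovnkube-controller stays up long enough to write its configuration, the same sync loop should flip NetworkReady to true and these records stop.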
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:38 crc kubenswrapper[5081]: E1003 15:29:38.827743 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:38 crc kubenswrapper[5081]: E1003 15:29:38.827845 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.901684 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.901750 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.901771 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.901800 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:38 crc kubenswrapper[5081]: I1003 15:29:38.901824 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:38Z","lastTransitionTime":"2025-10-03T15:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.004971 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.005042 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.005064 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.005096 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.005121 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.107658 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.107708 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.107723 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.107745 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.107762 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.210386 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.210433 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.210447 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.210467 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.210478 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.314465 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.314535 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.314611 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.314644 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.314665 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.417101 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.417165 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.417186 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.417214 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.417234 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.520412 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.520474 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.520485 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.520508 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.520521 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.623879 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.623928 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.623940 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.623957 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.623970 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.728721 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.728795 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.728816 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.728847 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.728869 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.827036 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:39 crc kubenswrapper[5081]: E1003 15:29:39.827224 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.831457 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.831519 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.831537 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.831597 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.831620 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.934540 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.934595 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.934606 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.934624 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:39 crc kubenswrapper[5081]: I1003 15:29:39.934635 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:39Z","lastTransitionTime":"2025-10-03T15:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.037991 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.038040 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.038050 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.038071 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.038084 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.141544 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.141653 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.141670 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.141697 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.141718 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.245392 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.245468 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.245491 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.245524 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.245550 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.349199 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.349272 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.349290 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.349318 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.349344 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.452321 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.452387 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.452401 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.452424 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.452437 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.555678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.555744 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.555760 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.555788 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.555806 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.659493 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.659608 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.659639 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.659678 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.659706 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.762909 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.763001 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.763030 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.763068 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.763097 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.827278 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.827337 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.827277 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:40 crc kubenswrapper[5081]: E1003 15:29:40.827466 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:40 crc kubenswrapper[5081]: E1003 15:29:40.827737 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:40 crc kubenswrapper[5081]: E1003 15:29:40.827842 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.866999 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.867108 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.867128 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.867192 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.867212 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.970359 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.970428 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.970446 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.970470 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:40 crc kubenswrapper[5081]: I1003 15:29:40.970489 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:40Z","lastTransitionTime":"2025-10-03T15:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.075589 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.075643 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.075653 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.075674 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.075688 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:41Z","lastTransitionTime":"2025-10-03T15:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.178097 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.178143 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.178152 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.178170 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.178182 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:41Z","lastTransitionTime":"2025-10-03T15:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.274914 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.274978 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.274998 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.275025 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.275049 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:41Z","lastTransitionTime":"2025-10-03T15:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.302668 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.302732 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.302753 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.302789 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.302809 5081 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T15:29:41Z","lastTransitionTime":"2025-10-03T15:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.338790 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt"] Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.339404 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.341574 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.343254 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.343369 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.343696 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.398056 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-wg679" podStartSLOduration=77.398017735 podStartE2EDuration="1m17.398017735s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.397347895 +0000 UTC m=+100.362904548" watchObservedRunningTime="2025-10-03 15:29:41.398017735 +0000 UTC m=+100.363574338" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.426086 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee35a822-c520-4149-b2d0-5d8eaf54a785-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.426143 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee35a822-c520-4149-b2d0-5d8eaf54a785-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.426207 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.426437 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ee35a822-c520-4149-b2d0-5d8eaf54a785-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.426513 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: 
\"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.499009 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-7fljw" podStartSLOduration=77.49898991 podStartE2EDuration="1m17.49898991s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.471920095 +0000 UTC m=+100.437476718" watchObservedRunningTime="2025-10-03 15:29:41.49898991 +0000 UTC m=+100.464546523" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.499243 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-sz44f" podStartSLOduration=77.499229347 podStartE2EDuration="1m17.499229347s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.498946219 +0000 UTC m=+100.464502872" watchObservedRunningTime="2025-10-03 15:29:41.499229347 +0000 UTC m=+100.464785960" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.527830 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ee35a822-c520-4149-b2d0-5d8eaf54a785-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.527882 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.527907 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee35a822-c520-4149-b2d0-5d8eaf54a785-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.527933 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee35a822-c520-4149-b2d0-5d8eaf54a785-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.527969 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.528046 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.528110 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ee35a822-c520-4149-b2d0-5d8eaf54a785-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.529213 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ee35a822-c520-4149-b2d0-5d8eaf54a785-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.532257 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.532236656 podStartE2EDuration="17.532236656s" podCreationTimestamp="2025-10-03 15:29:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.531777503 +0000 UTC m=+100.497334126" watchObservedRunningTime="2025-10-03 15:29:41.532236656 +0000 UTC m=+100.497793279" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.537457 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee35a822-c520-4149-b2d0-5d8eaf54a785-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.552249 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee35a822-c520-4149-b2d0-5d8eaf54a785-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m4vwt\" (UID: \"ee35a822-c520-4149-b2d0-5d8eaf54a785\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.613788 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.613761891 podStartE2EDuration="1m19.613761891s" podCreationTimestamp="2025-10-03 15:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.613622237 +0000 UTC m=+100.579178870" watchObservedRunningTime="2025-10-03 15:29:41.613761891 +0000 UTC m=+100.579318514" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.632613 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.632585773 podStartE2EDuration="52.632585773s" podCreationTimestamp="2025-10-03 15:28:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.631497351 +0000 UTC m=+100.597053964" watchObservedRunningTime="2025-10-03 15:29:41.632585773 +0000 UTC m=+100.598142386" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.644204 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podStartSLOduration=77.644180484 podStartE2EDuration="1m17.644180484s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.643546025 +0000 UTC m=+100.609102668" watchObservedRunningTime="2025-10-03 15:29:41.644180484 +0000 UTC m=+100.609737097" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.657367 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.679429 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=74.679397248 podStartE2EDuration="1m14.679397248s" podCreationTimestamp="2025-10-03 15:28:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.677579905 +0000 UTC m=+100.643136508" watchObservedRunningTime="2025-10-03 15:29:41.679397248 +0000 UTC m=+100.644953881" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.709136 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=8.709118431 podStartE2EDuration="8.709118431s" podCreationTimestamp="2025-10-03 15:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.708591496 +0000 UTC m=+100.674148139" watchObservedRunningTime="2025-10-03 15:29:41.709118431 +0000 UTC m=+100.674675044" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.721429 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-7l6c6" podStartSLOduration=77.721390291 podStartE2EDuration="1m17.721390291s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.719886647 +0000 UTC m=+100.685443260" watchObservedRunningTime="2025-10-03 15:29:41.721390291 +0000 UTC m=+100.686946924" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.733081 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76b6m" podStartSLOduration=77.733052124 podStartE2EDuration="1m17.733052124s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:41.732673833 +0000 UTC m=+100.698230466" watchObservedRunningTime="2025-10-03 15:29:41.733052124 +0000 UTC m=+100.698608757" Oct 03 15:29:41 crc kubenswrapper[5081]: I1003 15:29:41.827375 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:41 crc kubenswrapper[5081]: E1003 15:29:41.828607 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.420816 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" event={"ID":"ee35a822-c520-4149-b2d0-5d8eaf54a785","Type":"ContainerStarted","Data":"41281d766e43f5fc9c52d4aa436232ae3049a4454457d52fdc0510acde3ffe8d"} Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.420889 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" event={"ID":"ee35a822-c520-4149-b2d0-5d8eaf54a785","Type":"ContainerStarted","Data":"69beed76e19b416a18c2c03a93c2c722e8e9948081f0b3ea69050a6bd60ffc65"} Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.440387 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m4vwt" podStartSLOduration=78.440353136 podStartE2EDuration="1m18.440353136s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:29:42.440016606 +0000 UTC m=+101.405573299" watchObservedRunningTime="2025-10-03 15:29:42.440353136 +0000 UTC m=+101.405909819" Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.744879 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:42 crc kubenswrapper[5081]: E1003 15:29:42.745060 5081 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:29:42 crc kubenswrapper[5081]: E1003 15:29:42.745140 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs podName:660149c8-a5c7-4581-abae-89611dafa042 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:46.745115846 +0000 UTC m=+165.710672469 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs") pod "network-metrics-daemon-zdszj" (UID: "660149c8-a5c7-4581-abae-89611dafa042") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.827369 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.827406 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:42 crc kubenswrapper[5081]: E1003 15:29:42.827510 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:42 crc kubenswrapper[5081]: I1003 15:29:42.827534 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:42 crc kubenswrapper[5081]: E1003 15:29:42.827695 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:42 crc kubenswrapper[5081]: E1003 15:29:42.827754 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:43 crc kubenswrapper[5081]: I1003 15:29:43.827112 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:43 crc kubenswrapper[5081]: E1003 15:29:43.827275 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:44 crc kubenswrapper[5081]: I1003 15:29:44.827131 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:44 crc kubenswrapper[5081]: I1003 15:29:44.827182 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:44 crc kubenswrapper[5081]: I1003 15:29:44.827207 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:44 crc kubenswrapper[5081]: E1003 15:29:44.827348 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:44 crc kubenswrapper[5081]: E1003 15:29:44.827466 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:44 crc kubenswrapper[5081]: E1003 15:29:44.827632 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:45 crc kubenswrapper[5081]: I1003 15:29:45.827588 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:45 crc kubenswrapper[5081]: E1003 15:29:45.827868 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:46 crc kubenswrapper[5081]: I1003 15:29:46.827503 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:46 crc kubenswrapper[5081]: I1003 15:29:46.827582 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:46 crc kubenswrapper[5081]: I1003 15:29:46.827551 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:46 crc kubenswrapper[5081]: E1003 15:29:46.827690 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:46 crc kubenswrapper[5081]: E1003 15:29:46.827922 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:46 crc kubenswrapper[5081]: E1003 15:29:46.827996 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:47 crc kubenswrapper[5081]: I1003 15:29:47.827478 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:47 crc kubenswrapper[5081]: E1003 15:29:47.827744 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:48 crc kubenswrapper[5081]: I1003 15:29:48.827007 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:48 crc kubenswrapper[5081]: I1003 15:29:48.827131 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:48 crc kubenswrapper[5081]: I1003 15:29:48.827007 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:48 crc kubenswrapper[5081]: E1003 15:29:48.827201 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:48 crc kubenswrapper[5081]: E1003 15:29:48.827423 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:48 crc kubenswrapper[5081]: E1003 15:29:48.827750 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:49 crc kubenswrapper[5081]: I1003 15:29:49.827376 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:49 crc kubenswrapper[5081]: E1003 15:29:49.827630 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:50 crc kubenswrapper[5081]: I1003 15:29:50.827162 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:50 crc kubenswrapper[5081]: I1003 15:29:50.827210 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:50 crc kubenswrapper[5081]: I1003 15:29:50.827162 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:50 crc kubenswrapper[5081]: E1003 15:29:50.827378 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:50 crc kubenswrapper[5081]: E1003 15:29:50.827501 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:50 crc kubenswrapper[5081]: E1003 15:29:50.827703 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:51 crc kubenswrapper[5081]: I1003 15:29:51.826907 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:51 crc kubenswrapper[5081]: E1003 15:29:51.828773 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:52 crc kubenswrapper[5081]: I1003 15:29:52.827406 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:52 crc kubenswrapper[5081]: I1003 15:29:52.827946 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:52 crc kubenswrapper[5081]: I1003 15:29:52.828005 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:52 crc kubenswrapper[5081]: E1003 15:29:52.828179 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:52 crc kubenswrapper[5081]: I1003 15:29:52.828340 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:29:52 crc kubenswrapper[5081]: E1003 15:29:52.828351 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:52 crc kubenswrapper[5081]: E1003 15:29:52.828483 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:52 crc kubenswrapper[5081]: E1003 15:29:52.828720 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5bxx6_openshift-ovn-kubernetes(e63642c7-8d80-4615-94d9-91d4c41421cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:29:53 crc kubenswrapper[5081]: I1003 15:29:53.827200 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:53 crc kubenswrapper[5081]: E1003 15:29:53.827581 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:54 crc kubenswrapper[5081]: I1003 15:29:54.827301 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:54 crc kubenswrapper[5081]: I1003 15:29:54.827652 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:54 crc kubenswrapper[5081]: I1003 15:29:54.827653 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:54 crc kubenswrapper[5081]: E1003 15:29:54.828108 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:54 crc kubenswrapper[5081]: E1003 15:29:54.828240 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:54 crc kubenswrapper[5081]: E1003 15:29:54.828394 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:55 crc kubenswrapper[5081]: I1003 15:29:55.827699 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:55 crc kubenswrapper[5081]: E1003 15:29:55.828433 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:56 crc kubenswrapper[5081]: I1003 15:29:56.829051 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:56 crc kubenswrapper[5081]: I1003 15:29:56.829238 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:56 crc kubenswrapper[5081]: E1003 15:29:56.829485 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:56 crc kubenswrapper[5081]: I1003 15:29:56.829613 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:56 crc kubenswrapper[5081]: E1003 15:29:56.829731 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:56 crc kubenswrapper[5081]: E1003 15:29:56.829712 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:57 crc kubenswrapper[5081]: I1003 15:29:57.827314 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:57 crc kubenswrapper[5081]: E1003 15:29:57.827790 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.485159 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.486001 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/0.log" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.486085 5081 generic.go:334] "Generic (PLEG): container finished" podID="af6b6616-1e4c-4618-890b-7eb334b8c339" containerID="b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e" exitCode=1 Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.486141 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerDied","Data":"b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e"} Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.486193 5081 scope.go:117] "RemoveContainer" containerID="c4eeb1fa1db6330a019321809b1291ffa60c4d8b86121eb01c65ce4a86c80b40" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.486899 5081 scope.go:117] "RemoveContainer" containerID="b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e" Oct 03 15:29:58 crc kubenswrapper[5081]: E1003 15:29:58.487196 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-7fljw_openshift-multus(af6b6616-1e4c-4618-890b-7eb334b8c339)\"" pod="openshift-multus/multus-7fljw" podUID="af6b6616-1e4c-4618-890b-7eb334b8c339" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 
15:29:58.827347 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.827431 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:29:58 crc kubenswrapper[5081]: I1003 15:29:58.827455 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:29:58 crc kubenswrapper[5081]: E1003 15:29:58.827665 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:29:58 crc kubenswrapper[5081]: E1003 15:29:58.827841 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:29:58 crc kubenswrapper[5081]: E1003 15:29:58.827991 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:29:59 crc kubenswrapper[5081]: I1003 15:29:59.492626 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:29:59 crc kubenswrapper[5081]: I1003 15:29:59.827294 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:29:59 crc kubenswrapper[5081]: E1003 15:29:59.827484 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:00 crc kubenswrapper[5081]: I1003 15:30:00.827153 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:00 crc kubenswrapper[5081]: I1003 15:30:00.827285 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:00 crc kubenswrapper[5081]: I1003 15:30:00.827153 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:00 crc kubenswrapper[5081]: E1003 15:30:00.827376 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:00 crc kubenswrapper[5081]: E1003 15:30:00.827476 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:00 crc kubenswrapper[5081]: E1003 15:30:00.827619 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:01 crc kubenswrapper[5081]: E1003 15:30:01.806588 5081 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 03 15:30:01 crc kubenswrapper[5081]: I1003 15:30:01.827288 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:01 crc kubenswrapper[5081]: E1003 15:30:01.829134 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:01 crc kubenswrapper[5081]: E1003 15:30:01.948720 5081 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 15:30:02 crc kubenswrapper[5081]: I1003 15:30:02.827177 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:02 crc kubenswrapper[5081]: I1003 15:30:02.827274 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:02 crc kubenswrapper[5081]: I1003 15:30:02.827224 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:02 crc kubenswrapper[5081]: E1003 15:30:02.827455 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:02 crc kubenswrapper[5081]: E1003 15:30:02.827662 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:02 crc kubenswrapper[5081]: E1003 15:30:02.827759 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:03 crc kubenswrapper[5081]: I1003 15:30:03.827765 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:03 crc kubenswrapper[5081]: E1003 15:30:03.828020 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:04 crc kubenswrapper[5081]: I1003 15:30:04.827639 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:04 crc kubenswrapper[5081]: I1003 15:30:04.827763 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:04 crc kubenswrapper[5081]: E1003 15:30:04.827836 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:04 crc kubenswrapper[5081]: I1003 15:30:04.827789 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:04 crc kubenswrapper[5081]: E1003 15:30:04.828016 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:04 crc kubenswrapper[5081]: E1003 15:30:04.828157 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:05 crc kubenswrapper[5081]: I1003 15:30:05.826757 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:05 crc kubenswrapper[5081]: E1003 15:30:05.827028 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:05 crc kubenswrapper[5081]: I1003 15:30:05.828461 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:30:06 crc kubenswrapper[5081]: I1003 15:30:06.522290 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:30:06 crc kubenswrapper[5081]: I1003 15:30:06.827184 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:06 crc kubenswrapper[5081]: I1003 15:30:06.827226 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:06 crc kubenswrapper[5081]: I1003 15:30:06.827258 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:06 crc kubenswrapper[5081]: E1003 15:30:06.827376 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:06 crc kubenswrapper[5081]: E1003 15:30:06.827498 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:06 crc kubenswrapper[5081]: E1003 15:30:06.827605 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:06 crc kubenswrapper[5081]: E1003 15:30:06.955060 5081 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.498261 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zdszj"] Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.535038 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.539367 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.539335 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerStarted","Data":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} Oct 03 15:30:07 crc kubenswrapper[5081]: E1003 15:30:07.539538 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.540211 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.597517 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podStartSLOduration=103.597488392 podStartE2EDuration="1m43.597488392s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:07.593964129 +0000 UTC m=+126.559520772" watchObservedRunningTime="2025-10-03 15:30:07.597488392 +0000 UTC m=+126.563045045" Oct 03 15:30:07 crc kubenswrapper[5081]: I1003 15:30:07.827293 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:07 crc kubenswrapper[5081]: E1003 15:30:07.827501 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:08 crc kubenswrapper[5081]: I1003 15:30:08.827101 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:08 crc kubenswrapper[5081]: I1003 15:30:08.827101 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:08 crc kubenswrapper[5081]: E1003 15:30:08.827511 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:08 crc kubenswrapper[5081]: E1003 15:30:08.827610 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:09 crc kubenswrapper[5081]: I1003 15:30:09.827739 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:09 crc kubenswrapper[5081]: I1003 15:30:09.828260 5081 scope.go:117] "RemoveContainer" containerID="b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e" Oct 03 15:30:09 crc kubenswrapper[5081]: I1003 15:30:09.828499 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:09 crc kubenswrapper[5081]: E1003 15:30:09.828716 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:09 crc kubenswrapper[5081]: E1003 15:30:09.828965 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:10 crc kubenswrapper[5081]: I1003 15:30:10.555869 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:30:10 crc kubenswrapper[5081]: I1003 15:30:10.826817 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:10 crc kubenswrapper[5081]: I1003 15:30:10.826947 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:10 crc kubenswrapper[5081]: E1003 15:30:10.827002 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:10 crc kubenswrapper[5081]: E1003 15:30:10.827178 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:11 crc kubenswrapper[5081]: I1003 15:30:11.562479 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:30:11 crc kubenswrapper[5081]: I1003 15:30:11.562571 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerStarted","Data":"ad25add9e1a27cf97894e382b0a37902bcef22aaa3f43e28a432ee4577a42d31"} Oct 03 15:30:11 crc kubenswrapper[5081]: I1003 15:30:11.827011 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:11 crc kubenswrapper[5081]: I1003 15:30:11.827091 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:11 crc kubenswrapper[5081]: E1003 15:30:11.828577 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:11 crc kubenswrapper[5081]: E1003 15:30:11.828721 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:11 crc kubenswrapper[5081]: E1003 15:30:11.955744 5081 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 15:30:12 crc kubenswrapper[5081]: I1003 15:30:12.827106 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:12 crc kubenswrapper[5081]: I1003 15:30:12.827106 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:12 crc kubenswrapper[5081]: E1003 15:30:12.827384 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:12 crc kubenswrapper[5081]: E1003 15:30:12.827716 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:13 crc kubenswrapper[5081]: I1003 15:30:13.826852 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:13 crc kubenswrapper[5081]: I1003 15:30:13.826851 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:13 crc kubenswrapper[5081]: E1003 15:30:13.827128 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:13 crc kubenswrapper[5081]: E1003 15:30:13.827369 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:14 crc kubenswrapper[5081]: I1003 15:30:14.827172 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:14 crc kubenswrapper[5081]: I1003 15:30:14.827280 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:14 crc kubenswrapper[5081]: E1003 15:30:14.827370 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:14 crc kubenswrapper[5081]: E1003 15:30:14.827522 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:15 crc kubenswrapper[5081]: I1003 15:30:15.826898 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:15 crc kubenswrapper[5081]: E1003 15:30:15.827487 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 15:30:15 crc kubenswrapper[5081]: I1003 15:30:15.826958 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:15 crc kubenswrapper[5081]: E1003 15:30:15.828096 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zdszj" podUID="660149c8-a5c7-4581-abae-89611dafa042" Oct 03 15:30:16 crc kubenswrapper[5081]: I1003 15:30:16.826948 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:16 crc kubenswrapper[5081]: E1003 15:30:16.827142 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 15:30:16 crc kubenswrapper[5081]: I1003 15:30:16.826990 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:16 crc kubenswrapper[5081]: E1003 15:30:16.828025 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.827615 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.827615 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.832403 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.834160 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.834394 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 03 15:30:17 crc kubenswrapper[5081]: I1003 15:30:17.834477 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 03 15:30:18 crc kubenswrapper[5081]: I1003 15:30:18.826760 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:18 crc kubenswrapper[5081]: I1003 15:30:18.826897 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:18 crc kubenswrapper[5081]: I1003 15:30:18.829123 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 03 15:30:18 crc kubenswrapper[5081]: I1003 15:30:18.830155 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.478897 5081 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.524869 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.525588 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.526674 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.527364 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.528849 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7npb2"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.529259 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.534267 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-48rh6"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.534909 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.535018 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.539015 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7v9g5"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.539239 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.548103 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pz8gv"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.548962 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.549135 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-7v9g5" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.553297 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553366 5081 reflector.go:561] object-"openshift-console-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.553407 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553422 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553436 5081 reflector.go:561] object-"openshift-console-operator"/"trusted-ca": failed to list *v1.ConfigMap: configmaps "trusted-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553499 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"trusted-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553491 5081 reflector.go:561] object-"openshift-dns-operator"/"metrics-tls": failed to list *v1.Secret: secrets "metrics-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns-operator": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553522 5081 reflector.go:561] object-"openshift-console-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553400 5081 reflector.go:561] object-"openshift-dns-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns-operator": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553590 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"kube-root-ca.crt\": Failed to watch 
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553590 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.553606 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553607 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553313 5081 reflector.go:561] object-"openshift-console-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553535 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns-operator\"/\"metrics-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"metrics-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553648 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553306 5081 reflector.go:561] object-"openshift-authentication-operator"/"service-ca-bundle": failed to list *v1.ConfigMap: configmaps "service-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553676 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"service-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"service-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553419 5081 reflector.go:561] object-"openshift-authentication-operator"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553706 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553734 5081 reflector.go:561] object-"openshift-authentication-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.553751 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553766 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.553802 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553813 5081 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-config": failed to list *v1.ConfigMap: configmaps "authentication-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.553939 5081 reflector.go:561] object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5": failed to list *v1.Secret: secrets "dns-operator-dockercfg-9mqw5" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.553974 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns-operator\"/\"dns-operator-dockercfg-9mqw5\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"dns-operator-dockercfg-9mqw5\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.554182 5081 reflector.go:561] object-"openshift-dns-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.554245 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.554325 5081 reflector.go:561] object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr": failed to list *v1.Secret: secrets "console-operator-dockercfg-4xjcr" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.554341 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"console-operator-dockercfg-4xjcr\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"console-operator-dockercfg-4xjcr\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.554532 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.554731 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.554814 5081 reflector.go:561] object-"openshift-authentication-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.554845 5081 reflector.go:561] object-"openshift-authentication-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.554774 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.554864 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.554840 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.554901 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.554773 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"authentication-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.554965 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.555120 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.555277 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.555336 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.555291 5081 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj": failed to list *v1.Secret: secrets "authentication-operator-dockercfg-mz9bj" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.555301 5081 reflector.go:561] object-"openshift-console-operator"/"console-operator-config": failed to list *v1.ConfigMap: configmaps "console-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object
Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.562664 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-dockercfg-mz9bj\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"authentication-operator-dockercfg-mz9bj\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.566410 5081 reflector.go:561] object-"openshift-apiserver"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.569922 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.570309 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.569411 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.572770 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.573335 5081 reflector.go:561] object-"openshift-console"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.580277 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.570796 5081 reflector.go:561] object-"openshift-console"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.580385 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.582533 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583107 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583388 5081 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.583612 5081 reflector.go:561] object-"openshift-apiserver"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583638 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.583657 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.583707 5081 reflector.go:561] object-"openshift-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.583719 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583776 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583852 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583931 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.583956 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.591916 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.593359 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.593613 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.593730 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.594591 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.595671 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.602023 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.602865 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.603672 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: W1003 15:30:22.604118 5081 reflector.go:561] object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff": failed to list *v1.Secret: secrets "openshift-apiserver-sa-dockercfg-djjff" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Oct 03 15:30:22 crc kubenswrapper[5081]: E1003 15:30:22.604172 5081 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-apiserver-sa-dockercfg-djjff\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-sa-dockercfg-djjff\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.607115 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.607917 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.608124 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.608703 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.609114 5081 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.609954 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.609955 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.611436 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.615927 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.617024 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.617231 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.617752 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.617760 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.617758 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.618019 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.618142 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.618212 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.618314 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.621310 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.621362 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-56p4s"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.621596 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.621991 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.622065 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.631099 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.631333 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.631546 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.631700 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.633440 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.633653 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.633799 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.633874 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.640230 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.641496 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.643800 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.643902 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.644110 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.644174 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.644114 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.644268 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.645256 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.647451 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.648468 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.672809 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qksdj"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.673529 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.674719 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675248 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-twgch"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675421 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675616 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675691 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675854 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.675974 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.678237 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.678316 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.678836 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.679628 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.679670 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.679634 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.679924 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.680006 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.680145 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.680273 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.683311 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.683444 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.684072 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.684922 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685209 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685869 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hr9b\" (UniqueName: \"kubernetes.io/projected/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-kube-api-access-7hr9b\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685903 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685944 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685964 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcvhq\" (UniqueName: \"kubernetes.io/projected/713bccb3-37e5-4795-8edf-ed3dd44bb275-kube-api-access-gcvhq\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685974 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.685985 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686127 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686146 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit-dir\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686175 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-client\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686193 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7d19f3e-957e-4642-8ada-19424fca2e00-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686210 5081 
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686210 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686228 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686246 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686266 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-267mt\" (UniqueName: \"kubernetes.io/projected/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-kube-api-access-267mt\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686304 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686321 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-encryption-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686345 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686368 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686397 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzx7g\" (UniqueName: \"kubernetes.io/projected/f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68-kube-api-access-lzx7g\") pod \"downloads-7954f5f757-7v9g5\" (UID: \"f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68\") " pod="openshift-console/downloads-7954f5f757-7v9g5"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686486 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-encryption-config\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686639 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686715 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686810 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"
Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686845 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686913 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.686967 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687113 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-client\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687169 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-policies\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687211 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687238 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687263 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-node-pullsecrets\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687366 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c7d19f3e-957e-4642-8ada-19424fca2e00-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687401 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7d19f3e-957e-4642-8ada-19424fca2e00-config\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687430 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687469 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlwsf\" (UniqueName: \"kubernetes.io/projected/8da083df-54d4-496a-81b6-52afe3a0c4e9-kube-api-access-zlwsf\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687501 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687498 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687616 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687702 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q84db\" (UniqueName: \"kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687735 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-dir\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687759 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7z2z\" (UniqueName: 
\"kubernetes.io/projected/c9c368d9-2375-4cef-8476-ce97bf83ab77-kube-api-access-q7z2z\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687875 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687903 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-serving-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687923 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687954 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-serving-cert\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.687978 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlds9\" (UniqueName: \"kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688000 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688064 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688086 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " 
pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688105 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688130 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688147 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-image-import-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.688171 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.690254 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.690872 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.701154 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.701890 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.702249 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.702847 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.707004 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.707376 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.710354 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.713487 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.713703 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.713854 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.714263 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.714368 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.715999 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.716808 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.717214 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.717609 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.717795 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.717903 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.718331 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.719153 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.719743 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.719830 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.720154 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.719928 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.721180 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.721285 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.721679 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.721764 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.722090 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.727660 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.733443 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.739739 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.741479 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkzll"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.746660 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.746916 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.750128 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qk55l"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.750332 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.753785 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7npb2"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.753969 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.763723 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-smss4"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.764959 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.766330 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.769157 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.769302 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.769420 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.773359 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7v9g5"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.775746 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-48rh6"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.779088 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.780208 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.781374 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.784871 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-clvgs"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.785919 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.785947 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.786411 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.787552 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788084 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788802 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788842 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788868 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788891 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit-dir\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788918 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-client\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788938 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7d19f3e-957e-4642-8ada-19424fca2e00-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.788980 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789002 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-267mt\" (UniqueName: \"kubernetes.io/projected/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-kube-api-access-267mt\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789025 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-encryption-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789046 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789065 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789107 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789126 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzx7g\" (UniqueName: \"kubernetes.io/projected/f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68-kube-api-access-lzx7g\") pod \"downloads-7954f5f757-7v9g5\" (UID: \"f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68\") " pod="openshift-console/downloads-7954f5f757-7v9g5" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789145 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-encryption-config\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789163 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789181 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789202 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789242 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789260 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789280 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789300 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789324 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-client\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789342 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-policies\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789361 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-node-pullsecrets\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789637 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789661 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7d19f3e-957e-4642-8ada-19424fca2e00-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789688 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7d19f3e-957e-4642-8ada-19424fca2e00-config\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789710 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zlwsf\" (UniqueName: \"kubernetes.io/projected/8da083df-54d4-496a-81b6-52afe3a0c4e9-kube-api-access-zlwsf\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789732 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789760 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q84db\" (UniqueName: \"kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789788 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-dir\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789811 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7z2z\" (UniqueName: \"kubernetes.io/projected/c9c368d9-2375-4cef-8476-ce97bf83ab77-kube-api-access-q7z2z\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789844 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789864 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-serving-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789886 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-serving-cert\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789933 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlds9\" (UniqueName: \"kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789956 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.789980 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790001 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790030 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790046 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790065 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790083 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-image-import-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790103 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hr9b\" (UniqueName: \"kubernetes.io/projected/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-kube-api-access-7hr9b\") pod 
\"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790122 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790150 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.790165 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcvhq\" (UniqueName: \"kubernetes.io/projected/713bccb3-37e5-4795-8edf-ed3dd44bb275-kube-api-access-gcvhq\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.791000 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.791006 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.791057 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-node-pullsecrets\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.791729 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.792259 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.792300 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pz8gv"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.792359 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.792408 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-dir\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.792804 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit-dir\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.793751 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.795428 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-image-import-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.796713 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.796762 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-audit-policies\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.796883 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-serving-ca\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.797266 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-audit\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.797300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.798096 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7d19f3e-957e-4642-8ada-19424fca2e00-config\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.798532 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.799136 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.799298 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.800250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.800419 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.800492 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.801147 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7d19f3e-957e-4642-8ada-19424fca2e00-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.802478 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") 
" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.802548 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.803258 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.804228 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.804396 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.804448 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.804464 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.804542 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-etcd-client\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.805024 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-etcd-client\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.805198 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.805643 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-encryption-config\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.806825 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-56p4s"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.808020 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-encryption-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.808149 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.808792 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.810084 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-twgch"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.811149 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713bccb3-37e5-4795-8edf-ed3dd44bb275-serving-cert\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.811277 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.812235 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.813135 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.813384 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.814458 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.815516 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.816579 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.817699 5081 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.819452 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.820776 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qk55l"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.821795 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.822775 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.823760 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-clvgs"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.826646 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkzll"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.827714 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-smss4"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.828806 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.842517 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.844778 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-4csfn"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.845705 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.847668 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.847998 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4csfn"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.849922 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.851612 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.852635 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.855451 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9ksc9"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.856620 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.856793 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.856811 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.857295 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-c8w4s"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.858038 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.858869 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9ksc9"] Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.868931 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.887933 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.908749 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.928311 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.948074 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.967284 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 03 15:30:22 crc kubenswrapper[5081]: I1003 15:30:22.995709 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.008199 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.029058 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.048923 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.068272 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.088842 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.108708 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.128995 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.148793 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.168909 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.208763 5081 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.228535 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.249099 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.268172 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.289160 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.308708 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.328809 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.349079 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.368811 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.387856 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.408237 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.430117 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.449054 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.469235 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.488962 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.508452 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.528533 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.548504 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.568446 5081 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.589264 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.608365 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.628342 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.648385 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.668738 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.689696 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.709151 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.726203 5081 request.go:700] Waited for 1.005584737s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.728907 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.748324 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.768513 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.788329 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.793961 5081 secret.go:188] Couldn't get secret openshift-console-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794114 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert podName:e603b6d2-229f-4655-a40b-ae18d1fe0d5b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294059107 +0000 UTC m=+143.259615720 (durationBeforeRetry 500ms). 
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794114 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert podName:e603b6d2-229f-4655-a40b-ae18d1fe0d5b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294059107 +0000 UTC m=+143.259615720 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert") pod "console-operator-58897d9998-48rh6" (UID: "e603b6d2-229f-4655-a40b-ae18d1fe0d5b") : failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794293 5081 secret.go:188] Couldn't get secret openshift-apiserver/serving-cert: failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794346 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert podName:c9c368d9-2375-4cef-8476-ce97bf83ab77 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294332065 +0000 UTC m=+143.259888678 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert") pod "apiserver-76f77b778f-pz8gv" (UID: "c9c368d9-2375-4cef-8476-ce97bf83ab77") : failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794460 5081 configmap.go:193] Couldn't get configMap openshift-authentication-operator/authentication-operator-config: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794492 5081 configmap.go:193] Couldn't get configMap openshift-apiserver/config: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794511 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config podName:8da083df-54d4-496a-81b6-52afe3a0c4e9 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.29449893 +0000 UTC m=+143.260055543 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config") pod "authentication-operator-69f744f599-6lbmc" (UID: "8da083df-54d4-496a-81b6-52afe3a0c4e9") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794530 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config podName:c9c368d9-2375-4cef-8476-ce97bf83ab77 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294520741 +0000 UTC m=+143.260077354 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config") pod "apiserver-76f77b778f-pz8gv" (UID: "c9c368d9-2375-4cef-8476-ce97bf83ab77") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794546 5081 configmap.go:193] Couldn't get configMap openshift-console-operator/console-operator-config: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794597 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config podName:e603b6d2-229f-4655-a40b-ae18d1fe0d5b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294587543 +0000 UTC m=+143.260144156 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config") pod "console-operator-58897d9998-48rh6" (UID: "e603b6d2-229f-4655-a40b-ae18d1fe0d5b") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794597 5081 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794637 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert podName:8da083df-54d4-496a-81b6-52afe3a0c4e9 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294628174 +0000 UTC m=+143.260184787 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert") pod "authentication-operator-69f744f599-6lbmc" (UID: "8da083df-54d4-496a-81b6-52afe3a0c4e9") : failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.794845 5081 configmap.go:193] Couldn't get configMap openshift-console-operator/trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.795009 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca podName:e603b6d2-229f-4655-a40b-ae18d1fe0d5b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.294981915 +0000 UTC m=+143.260538518 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca" (UniqueName: "kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca") pod "console-operator-58897d9998-48rh6" (UID: "e603b6d2-229f-4655-a40b-ae18d1fe0d5b") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.795021 5081 configmap.go:193] Couldn't get configMap openshift-authentication-operator/service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.795159 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle podName:8da083df-54d4-496a-81b6-52afe3a0c4e9 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.2951502 +0000 UTC m=+143.260706803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "service-ca-bundle" (UniqueName: "kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle") pod "authentication-operator-69f744f599-6lbmc" (UID: "8da083df-54d4-496a-81b6-52afe3a0c4e9") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.795775 5081 secret.go:188] Couldn't get secret openshift-dns-operator/metrics-tls: failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.795858 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls podName:3c37d766-5afd-4f91-a1bc-ec39345c9a5b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.29584659 +0000 UTC m=+143.261403393 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls") pod "dns-operator-744455d44c-7npb2" (UID: "3c37d766-5afd-4f91-a1bc-ec39345c9a5b") : failed to sync secret cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.800684 5081 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: E1003 15:30:23.800745 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle podName:8da083df-54d4-496a-81b6-52afe3a0c4e9 nodeName:}" failed. No retries permitted until 2025-10-03 15:30:24.300723935 +0000 UTC m=+143.266280548 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle") pod "authentication-operator-69f744f599-6lbmc" (UID: "8da083df-54d4-496a-81b6-52afe3a0c4e9") : failed to sync configmap cache: timed out waiting for the condition
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.807398 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.828359 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.848686 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.868267 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.887666 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.907950 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.934704 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.947929 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.968548 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Oct 03 15:30:23 crc kubenswrapper[5081]: I1003 15:30:23.988306 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.008622 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
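[Editor's annotation] Each nestedpendingoperations.go:348 entry above ("No retries permitted until ... (durationBeforeRetry 500ms)") is kubelet's exponential backoff for failed volume operations: the first retry is deferred 500ms and the delay grows on repeated failures up to a cap. A minimal sketch of the same pattern using apimachinery's wait package; the Factor, Steps, and Cap values are assumptions mirroring the observed 500ms initial delay, not constants read from this kubelet build:

    package main

    import (
    	"errors"
    	"fmt"
    	"time"

    	"k8s.io/apimachinery/pkg/util/wait"
    )

    func main() {
    	attempts := 0
    	backoff := wait.Backoff{
    		Duration: 500 * time.Millisecond, // matches "durationBeforeRetry 500ms" in the log
    		Factor:   2.0,                    // delay roughly doubles per failure (assumption)
    		Steps:    5,                      // illustrative retry budget
    		Cap:      2 * time.Minute,        // illustrative upper bound on the delay
    	}
    	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
    		attempts++
    		// Stand-in for MountVolume.SetUp: fail until the secret cache syncs.
    		if attempts < 4 {
    			fmt.Printf("attempt %d: failed to sync secret cache, retrying\n", attempts)
    			return false, nil
    		}
    		return true, nil
    	})
    	if errors.Is(err, wait.ErrWaitTimeout) {
    		fmt.Println("gave up: timed out waiting for the condition")
    		return
    	}
    	fmt.Printf("mount succeeded after %d attempts\n", attempts)
    }

Note that the "timed out waiting for the condition" suffix in the log errors is the stock message for this kind of bounded wait; here it only means the per-namespace object cache had not populated yet, and the retries visible below succeed moments later.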
object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.047964 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.068303 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.088400 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.108882 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.128145 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.148276 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.168249 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.187631 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.227907 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.248144 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.267981 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.287976 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306359 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306402 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306489 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306547 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306767 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306791 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306833 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306855 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.306990 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.307925 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.328985 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.348296 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.368033 5081 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.387341 5081 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.407988 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.442829 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcvhq\" (UniqueName: \"kubernetes.io/projected/713bccb3-37e5-4795-8edf-ed3dd44bb275-kube-api-access-gcvhq\") pod \"apiserver-7bbb656c7d-7gf65\" (UID: \"713bccb3-37e5-4795-8edf-ed3dd44bb275\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.464676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q84db\" (UniqueName: \"kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db\") pod \"controller-manager-879f6c89f-cgxc7\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.478698 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.537048 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlds9\" (UniqueName: \"kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9\") pod \"oauth-openshift-558db77b4-ntvnz\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.551680 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7d19f3e-957e-4642-8ada-19424fca2e00-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-84jgh\" (UID: \"c7d19f3e-957e-4642-8ada-19424fca2e00\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.608477 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.629661 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.644833 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.648264 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.667997 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.682712 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.688363 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.704670 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"] Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.709211 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.728643 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.746920 5081 request.go:700] Waited for 1.888660956s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-dockercfg-qx5rd&limit=500&resourceVersion=0 Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.749762 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.768431 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.788654 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.795362 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814481 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814541 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-metrics-tls\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814586 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-trusted-ca\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814757 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rchl7\" (UniqueName: \"kubernetes.io/projected/7312c8b5-d029-4dad-99e8-d90247bb08a1-kube-api-access-rchl7\") pod \"migrator-59844c95c7-49td8\" (UID: \"7312c8b5-d029-4dad-99e8-d90247bb08a1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814811 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814850 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814906 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrj5c\" (UniqueName: \"kubernetes.io/projected/dbec332a-71ca-4cf0-9c06-9f95853911b7-kube-api-access-nrj5c\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814946 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814971 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-metrics-certs\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.814999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-service-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815042 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815079 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815121 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815151 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815222 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-config\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815267 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7zjx\" (UniqueName: \"kubernetes.io/projected/68d4e48c-5240-4e89-84cf-c837158cc6b8-kube-api-access-p7zjx\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc 
kubenswrapper[5081]: I1003 15:30:24.815286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815309 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815326 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815342 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhz5p\" (UniqueName: \"kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815385 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zm55\" (UniqueName: \"kubernetes.io/projected/7ae3b38e-2c09-4945-abb8-888051f4132a-kube-api-access-7zm55\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815421 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ae3b38e-2c09-4945-abb8-888051f4132a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815440 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5mw4\" (UniqueName: \"kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815456 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-client\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" 
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815481 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbec332a-71ca-4cf0-9c06-9f95853911b7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815511 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4"
Oct 03 15:30:24 crc kubenswrapper[5081]: E1003 15:30:24.815541 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.315518075 +0000 UTC m=+144.281074688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815638 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snkrl\" (UniqueName: \"kubernetes.io/projected/72843d3c-0fcf-4436-9516-164af96ca830-kube-api-access-snkrl\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815670 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815708 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72843d3c-0fcf-4436-9516-164af96ca830-service-ca-bundle\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815729 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815793 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-auth-proxy-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815851 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815877 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whzhq\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815941 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ae3b38e-2c09-4945-abb8-888051f4132a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.815979 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgxw9\" (UniqueName: \"kubernetes.io/projected/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-kube-api-access-kgxw9\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816005 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dc6fe42-5c34-4d22-a348-d5dd521247c1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816037 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-stats-auth\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816058 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816077 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tx5q\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-kube-api-access-5tx5q\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816097 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816119 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dc6fe42-5c34-4d22-a348-d5dd521247c1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816149 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816168 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816217 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srpxd\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-kube-api-access-srpxd\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816235 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-default-certificate\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816262 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/68d4e48c-5240-4e89-84cf-c837158cc6b8-machine-approver-tls\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816312 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816331 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbec332a-71ca-4cf0-9c06-9f95853911b7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816351 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816374 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816398 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-serving-cert\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.816417 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hmv9\" (UniqueName: \"kubernetes.io/projected/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-kube-api-access-4hmv9\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.829362 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.841422 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-config\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.857027 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.860220 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.869637 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.889037 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.893606 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"]
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.898486 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9c368d9-2375-4cef-8476-ce97bf83ab77-config\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.908841 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Oct 03 15:30:24 crc kubenswrapper[5081]: W1003 15:30:24.909470 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69e0e14b_c635_4027_b3ac_f89fd4d71f1b.slice/crio-2a5984f8bb8d5dee706b85552593e8a012bf69f5d42ae326f623bdab1d2d6eee WatchSource:0}: Error finding container 2a5984f8bb8d5dee706b85552593e8a012bf69f5d42ae326f623bdab1d2d6eee: Status 404 returned error can't find the container with id 2a5984f8bb8d5dee706b85552593e8a012bf69f5d42ae326f623bdab1d2d6eee
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916719 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916804 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-default-certificate\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916839 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-certs\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s"
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916859 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config\") pod \"console-f9d7485db-hr5zj\" (UID: 
\"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916880 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbec332a-71ca-4cf0-9c06-9f95853911b7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916901 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.916918 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-serving-cert\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: E1003 15:30:24.916984 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.41695256 +0000 UTC m=+144.382509183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918140 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbec332a-71ca-4cf0-9c06-9f95853911b7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918177 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918230 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-metrics-tls\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918261 
5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dc15d380-65e8-493c-9e8d-2fbec9f058be-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918194 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918298 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rchl7\" (UniqueName: \"kubernetes.io/projected/7312c8b5-d029-4dad-99e8-d90247bb08a1-kube-api-access-rchl7\") pod \"migrator-59844c95c7-49td8\" (UID: \"7312c8b5-d029-4dad-99e8-d90247bb08a1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918336 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq6dz\" (UniqueName: \"kubernetes.io/projected/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-kube-api-access-nq6dz\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918364 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918389 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-proxy-tls\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.918498 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrj5c\" (UniqueName: \"kubernetes.io/projected/dbec332a-71ca-4cf0-9c06-9f95853911b7-kube-api-access-nrj5c\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.919451 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.919578 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4992f05a-4ae9-4d48-a041-ed3d3f819b94-tmpfs\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.919633 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.919674 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-metrics-certs\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.919722 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk28j\" (UniqueName: \"kubernetes.io/projected/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-kube-api-access-qk28j\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920178 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/34fcb740-58cd-4b32-aae6-8381a956ab5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920268 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-registration-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920332 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920365 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/255659df-1859-49fe-8ced-0d05be9c5c4e-metrics-tls\") pod 
\"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920398 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g522k\" (UniqueName: \"kubernetes.io/projected/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-kube-api-access-g522k\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-images\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920438 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86nc9\" (UniqueName: \"kubernetes.io/projected/4992f05a-4ae9-4d48-a041-ed3d3f819b94-kube-api-access-86nc9\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920609 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7zjx\" (UniqueName: \"kubernetes.io/projected/68d4e48c-5240-4e89-84cf-c837158cc6b8-kube-api-access-p7zjx\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920673 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c45a715-d81c-4b98-a687-e42af0efee14-serving-cert\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920707 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920736 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" 
Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920755 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06ab3f86-8279-44ee-a948-def749e60667-config\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920787 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8vfl\" (UniqueName: \"kubernetes.io/projected/255659df-1859-49fe-8ced-0d05be9c5c4e-kube-api-access-t8vfl\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920829 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c45a715-d81c-4b98-a687-e42af0efee14-config\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920874 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5mw4\" (UniqueName: \"kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920902 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-client\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920945 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3f291d5-f475-4f1d-9291-897465bb6cd7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920969 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-apiservice-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.920995 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921021 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921043 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-webhook-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921089 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921138 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72843d3c-0fcf-4436-9516-164af96ca830-service-ca-bundle\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921166 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvvw5\" (UniqueName: \"kubernetes.io/projected/34fcb740-58cd-4b32-aae6-8381a956ab5d-kube-api-access-rvvw5\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921209 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-auth-proxy-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921239 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-srv-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921265 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-srv-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921323 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-m5pg9\" (UniqueName: \"kubernetes.io/projected/84647337-65f5-40fe-9ee7-62f1faebb04c-kube-api-access-m5pg9\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921358 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dc6fe42-5c34-4d22-a348-d5dd521247c1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921386 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szkz4\" (UniqueName: \"kubernetes.io/projected/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-kube-api-access-szkz4\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921418 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921446 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-key\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921475 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921506 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dc6fe42-5c34-4d22-a348-d5dd521247c1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921532 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.921720 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922149 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922221 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-csi-data-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922282 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh9xx\" (UniqueName: \"kubernetes.io/projected/a18d484e-c5e4-4460-8585-e4ccb6ec906c-kube-api-access-xh9xx\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922316 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-images\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922345 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922407 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/68d4e48c-5240-4e89-84cf-c837158cc6b8-machine-approver-tls\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922436 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzh5x\" (UniqueName: \"kubernetes.io/projected/185c2ecc-dbb0-4666-b3ff-72f099427ccc-kube-api-access-lzh5x\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94zsq\" (UniqueName: 
\"kubernetes.io/projected/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-kube-api-access-94zsq\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhtmn\" (UniqueName: \"kubernetes.io/projected/48df3d15-6274-47b2-a109-9c9834e35563-kube-api-access-mhtmn\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922547 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f291d5-f475-4f1d-9291-897465bb6cd7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922593 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922619 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hmv9\" (UniqueName: \"kubernetes.io/projected/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-kube-api-access-4hmv9\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922643 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6l2z\" (UniqueName: \"kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922668 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-profile-collector-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922694 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-trusted-ca\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922719 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/255659df-1859-49fe-8ced-0d05be9c5c4e-config-volume\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922747 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-socket-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922771 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a18d484e-c5e4-4460-8585-e4ccb6ec906c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922805 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb8hh\" (UniqueName: \"kubernetes.io/projected/7c45a715-d81c-4b98-a687-e42af0efee14-kube-api-access-jb8hh\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922837 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922865 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/84647337-65f5-40fe-9ee7-62f1faebb04c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922888 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06ab3f86-8279-44ee-a948-def749e60667-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922914 5081 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-service-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922937 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922958 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-cabundle\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922989 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923014 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923039 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc15d380-65e8-493c-9e8d-2fbec9f058be-serving-cert\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923065 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46575\" (UniqueName: \"kubernetes.io/projected/b4b405fa-53ce-43ea-ba96-cf08df54171c-kube-api-access-46575\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923079 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923099 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923124 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-config\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923164 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdwht\" (UniqueName: \"kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923221 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923257 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923279 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923304 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhz5p\" (UniqueName: \"kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923332 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-cert\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923353 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923380 
5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f291d5-f475-4f1d-9291-897465bb6cd7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923409 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zm55\" (UniqueName: \"kubernetes.io/projected/7ae3b38e-2c09-4945-abb8-888051f4132a-kube-api-access-7zm55\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923433 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ae3b38e-2c09-4945-abb8-888051f4132a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923457 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923484 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7cnq\" (UniqueName: \"kubernetes.io/projected/5624e6ea-f7c1-40bf-8463-45773373945d-kube-api-access-s7cnq\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923508 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbec332a-71ca-4cf0-9c06-9f95853911b7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923533 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk4zp\" (UniqueName: \"kubernetes.io/projected/dc15d380-65e8-493c-9e8d-2fbec9f058be-kube-api-access-pk4zp\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923579 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfq9q\" (UniqueName: \"kubernetes.io/projected/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-kube-api-access-dfq9q\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " 
pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snkrl\" (UniqueName: \"kubernetes.io/projected/72843d3c-0fcf-4436-9516-164af96ca830-kube-api-access-snkrl\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923628 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-plugins-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923653 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923673 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923700 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923725 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whzhq\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923757 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ae3b38e-2c09-4945-abb8-888051f4132a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923777 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923855 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgxw9\" (UniqueName: \"kubernetes.io/projected/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-kube-api-access-kgxw9\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923882 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tx5q\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-kube-api-access-5tx5q\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923908 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-node-bootstrap-token\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923930 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06ab3f86-8279-44ee-a948-def749e60667-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-default-certificate\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.923958 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-stats-auth\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924161 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b405fa-53ce-43ea-ba96-cf08df54171c-proxy-tls\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924197 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-config\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924249 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-srpxd\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-kube-api-access-srpxd\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924283 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-mountpoint-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924518 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dc6fe42-5c34-4d22-a348-d5dd521247c1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.924645 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68d4e48c-5240-4e89-84cf-c837158cc6b8-auth-proxy-config\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.922160 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.925855 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-serving-cert\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.927912 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.927985 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.928359 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72843d3c-0fcf-4436-9516-164af96ca830-service-ca-bundle\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " 
pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.929021 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-service-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.929628 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ae3b38e-2c09-4945-abb8-888051f4132a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.929654 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.929706 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-stats-auth\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.929976 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: E1003 15:30:24.930164 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.430146401 +0000 UTC m=+144.395703004 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.930385 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbec332a-71ca-4cf0-9c06-9f95853911b7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.930400 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.930782 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dc6fe42-5c34-4d22-a348-d5dd521247c1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.930803 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-ca\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.931259 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-config\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.931485 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.931941 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ae3b38e-2c09-4945-abb8-888051f4132a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.932399 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/68d4e48c-5240-4e89-84cf-c837158cc6b8-machine-approver-tls\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.933452 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72843d3c-0fcf-4436-9516-164af96ca830-metrics-certs\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.935384 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.935516 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.937726 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-etcd-client\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.940209 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-trusted-ca\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.941020 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.945009 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8da083df-54d4-496a-81b6-52afe3a0c4e9-serving-cert\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.945186 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.945601 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-metrics-tls\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.949457 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.959129 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-config\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.969000 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.980062 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh"] Oct 03 15:30:24 crc kubenswrapper[5081]: I1003 15:30:24.989224 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.007812 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.024321 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.025550 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.025700 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.52566195 +0000 UTC m=+144.491218563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.025912 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq6dz\" (UniqueName: \"kubernetes.io/projected/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-kube-api-access-nq6dz\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.025953 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dc15d380-65e8-493c-9e8d-2fbec9f058be-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.025972 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-proxy-tls\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.025994 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4992f05a-4ae9-4d48-a041-ed3d3f819b94-tmpfs\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026028 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk28j\" (UniqueName: \"kubernetes.io/projected/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-kube-api-access-qk28j\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026069 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-registration-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026090 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/34fcb740-58cd-4b32-aae6-8381a956ab5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026111 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/255659df-1859-49fe-8ced-0d05be9c5c4e-metrics-tls\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026130 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g522k\" (UniqueName: \"kubernetes.io/projected/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-kube-api-access-g522k\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026148 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-images\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026177 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c45a715-d81c-4b98-a687-e42af0efee14-serving-cert\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026194 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86nc9\" (UniqueName: \"kubernetes.io/projected/4992f05a-4ae9-4d48-a041-ed3d3f819b94-kube-api-access-86nc9\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026209 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026231 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06ab3f86-8279-44ee-a948-def749e60667-config\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026248 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c45a715-d81c-4b98-a687-e42af0efee14-config\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8vfl\" (UniqueName: \"kubernetes.io/projected/255659df-1859-49fe-8ced-0d05be9c5c4e-kube-api-access-t8vfl\") pod 
\"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026301 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3f291d5-f475-4f1d-9291-897465bb6cd7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026321 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-apiservice-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026340 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-webhook-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026406 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvvw5\" (UniqueName: \"kubernetes.io/projected/34fcb740-58cd-4b32-aae6-8381a956ab5d-kube-api-access-rvvw5\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026433 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-srv-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026450 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-srv-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026469 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5pg9\" (UniqueName: \"kubernetes.io/projected/84647337-65f5-40fe-9ee7-62f1faebb04c-kube-api-access-m5pg9\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026489 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szkz4\" (UniqueName: \"kubernetes.io/projected/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-kube-api-access-szkz4\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026507 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-key\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026497 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dc15d380-65e8-493c-9e8d-2fbec9f058be-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026534 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-csi-data-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026557 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-images\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026578 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4992f05a-4ae9-4d48-a041-ed3d3f819b94-tmpfs\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026593 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026675 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh9xx\" (UniqueName: \"kubernetes.io/projected/a18d484e-c5e4-4460-8585-e4ccb6ec906c-kube-api-access-xh9xx\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026713 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzh5x\" (UniqueName: \"kubernetes.io/projected/185c2ecc-dbb0-4666-b3ff-72f099427ccc-kube-api-access-lzh5x\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026735 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-94zsq\" (UniqueName: \"kubernetes.io/projected/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-kube-api-access-94zsq\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026757 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026801 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhtmn\" (UniqueName: \"kubernetes.io/projected/48df3d15-6274-47b2-a109-9c9834e35563-kube-api-access-mhtmn\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026831 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6l2z\" (UniqueName: \"kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026850 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f291d5-f475-4f1d-9291-897465bb6cd7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026875 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/255659df-1859-49fe-8ced-0d05be9c5c4e-config-volume\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026895 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-profile-collector-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026919 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a18d484e-c5e4-4460-8585-e4ccb6ec906c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026947 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-socket-dir\") pod \"csi-hostpathplugin-clvgs\" 
(UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.026983 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027004 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb8hh\" (UniqueName: \"kubernetes.io/projected/7c45a715-d81c-4b98-a687-e42af0efee14-kube-api-access-jb8hh\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027029 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06ab3f86-8279-44ee-a948-def749e60667-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027057 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/84647337-65f5-40fe-9ee7-62f1faebb04c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027084 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-cabundle\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027110 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027146 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027166 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc15d380-65e8-493c-9e8d-2fbec9f058be-serving-cert\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: 
\"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027186 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46575\" (UniqueName: \"kubernetes.io/projected/b4b405fa-53ce-43ea-ba96-cf08df54171c-kube-api-access-46575\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdwht\" (UniqueName: \"kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027290 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027316 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f291d5-f475-4f1d-9291-897465bb6cd7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027345 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-cert\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027364 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7cnq\" (UniqueName: \"kubernetes.io/projected/5624e6ea-f7c1-40bf-8463-45773373945d-kube-api-access-s7cnq\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027387 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc 
kubenswrapper[5081]: I1003 15:30:25.027407 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk4zp\" (UniqueName: \"kubernetes.io/projected/dc15d380-65e8-493c-9e8d-2fbec9f058be-kube-api-access-pk4zp\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027452 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-plugins-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027475 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfq9q\" (UniqueName: \"kubernetes.io/projected/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-kube-api-access-dfq9q\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027493 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027526 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027558 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-node-bootstrap-token\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027593 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06ab3f86-8279-44ee-a948-def749e60667-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027648 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-config\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027679 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: 
\"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-mountpoint-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b405fa-53ce-43ea-ba96-cf08df54171c-proxy-tls\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.027721 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-certs\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.028673 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.029665 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-proxy-tls\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.030124 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c45a715-d81c-4b98-a687-e42af0efee14-config\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.030409 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-registration-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.031802 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c45a715-d81c-4b98-a687-e42af0efee14-serving-cert\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.031761 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-images\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.032848 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-cabundle\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.033240 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.533217414 +0000 UTC m=+144.498774237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.033417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06ab3f86-8279-44ee-a948-def749e60667-config\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.034114 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f291d5-f475-4f1d-9291-897465bb6cd7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.035121 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/84647337-65f5-40fe-9ee7-62f1faebb04c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.035421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-config\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.036058 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.036417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.036501 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/34fcb740-58cd-4b32-aae6-8381a956ab5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.036601 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-mountpoint-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.037205 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-certs\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.038600 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-srv-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.038884 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-csi-data-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.039012 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-plugins-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.039286 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-metrics-tls\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.039373 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/185c2ecc-dbb0-4666-b3ff-72f099427ccc-socket-dir\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040076 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-profile-collector-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: 
I1003 15:30:25.040082 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/255659df-1859-49fe-8ced-0d05be9c5c4e-metrics-tls\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040319 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/255659df-1859-49fe-8ced-0d05be9c5c4e-config-volume\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-webhook-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040533 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48df3d15-6274-47b2-a109-9c9834e35563-srv-cert\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040709 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.040789 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.041261 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b4b405fa-53ce-43ea-ba96-cf08df54171c-images\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.041290 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a18d484e-c5e4-4460-8585-e4ccb6ec906c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.041320 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5624e6ea-f7c1-40bf-8463-45773373945d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.041600 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4992f05a-4ae9-4d48-a041-ed3d3f819b94-apiservice-cert\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.042094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.042206 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-cert\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.042516 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-node-bootstrap-token\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.042725 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f291d5-f475-4f1d-9291-897465bb6cd7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.043401 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc15d380-65e8-493c-9e8d-2fbec9f058be-serving-cert\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.043768 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b4b405fa-53ce-43ea-ba96-cf08df54171c-proxy-tls\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.043960 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.045210 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/06ab3f86-8279-44ee-a948-def749e60667-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.046179 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.048347 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-signing-key\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.067887 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.069881 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.080393 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-267mt\" (UniqueName: \"kubernetes.io/projected/3c37d766-5afd-4f91-a1bc-ec39345c9a5b-kube-api-access-267mt\") pod \"dns-operator-744455d44c-7npb2\" (UID: \"3c37d766-5afd-4f91-a1bc-ec39345c9a5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.091972 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.113865 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.115152 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzx7g\" (UniqueName: \"kubernetes.io/projected/f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68-kube-api-access-lzx7g\") pod \"downloads-7954f5f757-7v9g5\" (UID: \"f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68\") " pod="openshift-console/downloads-7954f5f757-7v9g5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.125002 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7z2z\" (UniqueName: \"kubernetes.io/projected/c9c368d9-2375-4cef-8476-ce97bf83ab77-kube-api-access-q7z2z\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.130576 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.131018 5081 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.63097318 +0000 UTC m=+144.596529793 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.131425 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.131893 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.631873226 +0000 UTC m=+144.597429839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.133437 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.155228 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.163458 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlwsf\" (UniqueName: \"kubernetes.io/projected/8da083df-54d4-496a-81b6-52afe3a0c4e9-kube-api-access-zlwsf\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.167539 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.176452 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hr9b\" (UniqueName: \"kubernetes.io/projected/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-kube-api-access-7hr9b\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.188640 5081 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.217361 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.219741 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-trusted-ca\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.228180 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.232996 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.233188 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.733162217 +0000 UTC m=+144.698718840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.234036 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.234475 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.734462815 +0000 UTC m=+144.700019488 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.239985 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c368d9-2375-4cef-8476-ce97bf83ab77-serving-cert\") pod \"apiserver-76f77b778f-pz8gv\" (UID: \"c9c368d9-2375-4cef-8476-ce97bf83ab77\") " pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.248462 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.260509 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e603b6d2-229f-4655-a40b-ae18d1fe0d5b-serving-cert\") pod \"console-operator-58897d9998-48rh6\" (UID: \"e603b6d2-229f-4655-a40b-ae18d1fe0d5b\") " pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.268451 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.279888 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8da083df-54d4-496a-81b6-52afe3a0c4e9-service-ca-bundle\") pod \"authentication-operator-69f744f599-6lbmc\" (UID: \"8da083df-54d4-496a-81b6-52afe3a0c4e9\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.296149 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.303253 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.315781 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.323181 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rchl7\" (UniqueName: \"kubernetes.io/projected/7312c8b5-d029-4dad-99e8-d90247bb08a1-kube-api-access-rchl7\") pod \"migrator-59844c95c7-49td8\" (UID: \"7312c8b5-d029-4dad-99e8-d90247bb08a1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.331287 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.334604 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.335206 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.835184159 +0000 UTC m=+144.800740772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.340678 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7v9g5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.347085 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrj5c\" (UniqueName: \"kubernetes.io/projected/dbec332a-71ca-4cf0-9c06-9f95853911b7-kube-api-access-nrj5c\") pod \"kube-storage-version-migrator-operator-b67b599dd-qcjc4\" (UID: \"dbec332a-71ca-4cf0-9c06-9f95853911b7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.365063 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7zjx\" (UniqueName: \"kubernetes.io/projected/68d4e48c-5240-4e89-84cf-c837158cc6b8-kube-api-access-p7zjx\") pod \"machine-approver-56656f9798-kf9l4\" (UID: \"68d4e48c-5240-4e89-84cf-c837158cc6b8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.385446 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5mw4\" (UniqueName: \"kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4\") pod \"route-controller-manager-6576b87f9c-8bsnl\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.409384 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srpxd\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-kube-api-access-srpxd\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.426032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.443020 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.444443 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.444939 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:25.9449222 +0000 UTC m=+144.910478813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.453651 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2dc6fe42-5c34-4d22-a348-d5dd521247c1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdq6c\" (UID: \"2dc6fe42-5c34-4d22-a348-d5dd521247c1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.464322 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.490841 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgxw9\" (UniqueName: \"kubernetes.io/projected/ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca-kube-api-access-kgxw9\") pod \"openshift-apiserver-operator-796bbdcf4f-kcq4j\" (UID: \"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.491424 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hmv9\" (UniqueName: \"kubernetes.io/projected/7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f-kube-api-access-4hmv9\") pod \"etcd-operator-b45778765-56p4s\" (UID: \"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.499973 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.509050 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.525157 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whzhq\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.546447 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.547882 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snkrl\" (UniqueName: \"kubernetes.io/projected/72843d3c-0fcf-4436-9516-164af96ca830-kube-api-access-snkrl\") pod \"router-default-5444994796-qksdj\" (UID: \"72843d3c-0fcf-4436-9516-164af96ca830\") " pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.547973 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.04788803 +0000 UTC m=+145.013444643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.548016 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.560042 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-48rh6"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.564295 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.568760 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zm55\" (UniqueName: \"kubernetes.io/projected/7ae3b38e-2c09-4945-abb8-888051f4132a-kube-api-access-7zm55\") pod \"openshift-controller-manager-operator-756b6f6bc6-h96hj\" (UID: \"7ae3b38e-2c09-4945-abb8-888051f4132a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.585383 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tx5q\" (UniqueName: \"kubernetes.io/projected/34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7-kube-api-access-5tx5q\") pod \"ingress-operator-5b745b69d9-twgch\" (UID: \"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:25 crc kubenswrapper[5081]: W1003 15:30:25.596906 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68d4e48c_5240_4e89_84cf_c837158cc6b8.slice/crio-16009e5a4f2c19dcb6127210dd34484cf8afcfc1107e9e1e67c29996c3d15277 WatchSource:0}: Error finding container 16009e5a4f2c19dcb6127210dd34484cf8afcfc1107e9e1e67c29996c3d15277: Status 404 returned error can't find the container with id 16009e5a4f2c19dcb6127210dd34484cf8afcfc1107e9e1e67c29996c3d15277 Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.606669 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhz5p\" (UniqueName: \"kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p\") pod \"console-f9d7485db-hr5zj\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.625328 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7npb2"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.625907 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-48rh6" event={"ID":"e603b6d2-229f-4655-a40b-ae18d1fe0d5b","Type":"ContainerStarted","Data":"0e143782e03f0dc837d63f673951dea2009540b592a23f4ab0d93099a9003122"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.628366 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk28j\" (UniqueName: \"kubernetes.io/projected/b43aaaa2-f6bb-449b-90ce-d7324dd5a06d-kube-api-access-qk28j\") pod \"machine-api-operator-5694c8668f-qk55l\" (UID: \"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.637927 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" event={"ID":"c7d19f3e-957e-4642-8ada-19424fca2e00","Type":"ContainerStarted","Data":"106b8b32e03abdf4ac982ae2301802281c79a2d6807aef8adc84b8d270f253f0"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.637984 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" 
event={"ID":"c7d19f3e-957e-4642-8ada-19424fca2e00","Type":"ContainerStarted","Data":"3c917ab42f390a261ca5a7b7fe86fde67d965c76b7ed71daaee8668ad309f3d3"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.644727 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" event={"ID":"69e0e14b-c635-4027-b3ac-f89fd4d71f1b","Type":"ContainerStarted","Data":"2eb004ffa330f9acf87394d441ccd0eab644c8ad37798418a3b389c83f3aba10"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.644796 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" event={"ID":"69e0e14b-c635-4027-b3ac-f89fd4d71f1b","Type":"ContainerStarted","Data":"2a5984f8bb8d5dee706b85552593e8a012bf69f5d42ae326f623bdab1d2d6eee"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.645200 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.647579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq6dz\" (UniqueName: \"kubernetes.io/projected/f9ea2cb1-8cc0-48f9-8777-c65838994ea4-kube-api-access-nq6dz\") pod \"machine-config-controller-84d6567774-c6dxw\" (UID: \"f9ea2cb1-8cc0-48f9-8777-c65838994ea4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.649198 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.649814 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.149799709 +0000 UTC m=+145.115356322 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.650800 5081 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-cgxc7 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.650874 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.662059 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.662391 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" event={"ID":"68d4e48c-5240-4e89-84cf-c837158cc6b8","Type":"ContainerStarted","Data":"16009e5a4f2c19dcb6127210dd34484cf8afcfc1107e9e1e67c29996c3d15277"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.669096 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk4zp\" (UniqueName: \"kubernetes.io/projected/dc15d380-65e8-493c-9e8d-2fbec9f058be-kube-api-access-pk4zp\") pod \"openshift-config-operator-7777fb866f-5jvtx\" (UID: \"dc15d380-65e8-493c-9e8d-2fbec9f058be\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.672814 5081 generic.go:334] "Generic (PLEG): container finished" podID="713bccb3-37e5-4795-8edf-ed3dd44bb275" containerID="6e4bdbccf74974d7c7bcba951ecbe69ca68dcf2cd4d2a1b74d442f92e2421782" exitCode=0 Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.672930 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" event={"ID":"713bccb3-37e5-4795-8edf-ed3dd44bb275","Type":"ContainerDied","Data":"6e4bdbccf74974d7c7bcba951ecbe69ca68dcf2cd4d2a1b74d442f92e2421782"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.673034 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" event={"ID":"713bccb3-37e5-4795-8edf-ed3dd44bb275","Type":"ContainerStarted","Data":"3f3df7f4e924485b8898b3bdf42d4be99e9b8bb42bb398d31f9331e29a7a13cf"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.675993 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" event={"ID":"86d5b7db-0c22-4446-9bff-2ff5493f9288","Type":"ContainerStarted","Data":"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.676031 5081 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" event={"ID":"86d5b7db-0c22-4446-9bff-2ff5493f9288","Type":"ContainerStarted","Data":"20a4ab50cc4c73133827053d5f8edc116b7d8a5f20cf1f4092e7cae5028276cb"} Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.676430 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.686187 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.688676 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.691442 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh9xx\" (UniqueName: \"kubernetes.io/projected/a18d484e-c5e4-4460-8585-e4ccb6ec906c-kube-api-access-xh9xx\") pod \"cluster-samples-operator-665b6dd947-b59f5\" (UID: \"a18d484e-c5e4-4460-8585-e4ccb6ec906c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.693914 5081 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-ntvnz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.694004 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.703688 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzh5x\" (UniqueName: \"kubernetes.io/projected/185c2ecc-dbb0-4666-b3ff-72f099427ccc-kube-api-access-lzh5x\") pod \"csi-hostpathplugin-clvgs\" (UID: \"185c2ecc-dbb0-4666-b3ff-72f099427ccc\") " pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.713261 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.728451 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94zsq\" (UniqueName: \"kubernetes.io/projected/124c4817-4bb4-49ab-b36c-16cb1f95f4d0-kube-api-access-94zsq\") pod \"service-ca-9c57cc56f-smss4\" (UID: \"124c4817-4bb4-49ab-b36c-16cb1f95f4d0\") " pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.731787 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.741550 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.748134 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3f291d5-f475-4f1d-9291-897465bb6cd7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7z5mn\" (UID: \"b3f291d5-f475-4f1d-9291-897465bb6cd7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.750680 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.752274 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.252242343 +0000 UTC m=+145.217798956 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.759528 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.768290 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8vfl\" (UniqueName: \"kubernetes.io/projected/255659df-1859-49fe-8ced-0d05be9c5c4e-kube-api-access-t8vfl\") pod \"dns-default-9ksc9\" (UID: \"255659df-1859-49fe-8ced-0d05be9c5c4e\") " pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.785010 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.805639 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86nc9\" (UniqueName: \"kubernetes.io/projected/4992f05a-4ae9-4d48-a041-ed3d3f819b94-kube-api-access-86nc9\") pod \"packageserver-d55dfcdfc-6k8gs\" (UID: \"4992f05a-4ae9-4d48-a041-ed3d3f819b94\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.834524 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvvw5\" (UniqueName: \"kubernetes.io/projected/34fcb740-58cd-4b32-aae6-8381a956ab5d-kube-api-access-rvvw5\") pod \"package-server-manager-789f6589d5-rwpg9\" (UID: \"34fcb740-58cd-4b32-aae6-8381a956ab5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.841798 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.848201 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.853244 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.853942 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.353921705 +0000 UTC m=+145.319478318 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.857201 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g522k\" (UniqueName: \"kubernetes.io/projected/972eae77-0b70-430b-bfd3-5a9b4b2ed7b1-kube-api-access-g522k\") pod \"machine-config-server-c8w4s\" (UID: \"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1\") " pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.869200 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhtmn\" (UniqueName: \"kubernetes.io/projected/48df3d15-6274-47b2-a109-9c9834e35563-kube-api-access-mhtmn\") pod \"catalog-operator-68c6474976-72wbm\" (UID: \"48df3d15-6274-47b2-a109-9c9834e35563\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.873244 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.876121 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6l2z\" (UniqueName: \"kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z\") pod \"collect-profiles-29325090-jzr7r\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.876587 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.881742 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.892222 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7v9g5"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.900459 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.901939 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.921669 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.923993 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb8hh\" (UniqueName: \"kubernetes.io/projected/7c45a715-d81c-4b98-a687-e42af0efee14-kube-api-access-jb8hh\") pod \"service-ca-operator-777779d784-mpp4g\" (UID: \"7c45a715-d81c-4b98-a687-e42af0efee14\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.926489 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06ab3f86-8279-44ee-a948-def749e60667-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sjkfz\" (UID: \"06ab3f86-8279-44ee-a948-def749e60667\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.926663 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.932783 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6lbmc"] Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.955715 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.956977 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7cnq\" (UniqueName: \"kubernetes.io/projected/5624e6ea-f7c1-40bf-8463-45773373945d-kube-api-access-s7cnq\") pod \"olm-operator-6b444d44fb-jrrbf\" (UID: \"5624e6ea-f7c1-40bf-8463-45773373945d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.957526 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szkz4\" (UniqueName: \"kubernetes.io/projected/8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c-kube-api-access-szkz4\") pod \"multus-admission-controller-857f4d67dd-qkzll\" (UID: \"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.959358 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c"] Oct 03 15:30:25 crc kubenswrapper[5081]: E1003 15:30:25.964852 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.46481214 +0000 UTC m=+145.430368753 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.965972 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.970655 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.971299 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdwht\" (UniqueName: \"kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht\") pod \"marketplace-operator-79b997595-8p57q\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:25 crc kubenswrapper[5081]: I1003 15:30:25.981013 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-pz8gv"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.001375 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46575\" (UniqueName: \"kubernetes.io/projected/b4b405fa-53ce-43ea-ba96-cf08df54171c-kube-api-access-46575\") pod \"machine-config-operator-74547568cd-jkbhb\" (UID: \"b4b405fa-53ce-43ea-ba96-cf08df54171c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.002797 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-smss4" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.014930 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.015833 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5pg9\" (UniqueName: \"kubernetes.io/projected/84647337-65f5-40fe-9ee7-62f1faebb04c-kube-api-access-m5pg9\") pod \"control-plane-machine-set-operator-78cbb6b69f-lk9tv\" (UID: \"84647337-65f5-40fe-9ee7-62f1faebb04c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.029052 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfq9q\" (UniqueName: \"kubernetes.io/projected/4e5bd586-f1c6-4522-af69-ee046c3bbcdb-kube-api-access-dfq9q\") pod \"ingress-canary-4csfn\" (UID: \"4e5bd586-f1c6-4522-af69-ee046c3bbcdb\") " pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.038994 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4csfn" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.047777 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.054738 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c8w4s" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.057659 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.058297 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.558274479 +0000 UTC m=+145.523831102 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: W1003 15:30:26.111417 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9c368d9_2375_4cef_8476_ce97bf83ab77.slice/crio-b2f377f76fc234ac9ca2c4ad56a4c371c0c758dd19891880c67fa42adcae7eee WatchSource:0}: Error finding container b2f377f76fc234ac9ca2c4ad56a4c371c0c758dd19891880c67fa42adcae7eee: Status 404 returned error can't find the container with id b2f377f76fc234ac9ca2c4ad56a4c371c0c758dd19891880c67fa42adcae7eee Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.159352 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.159886 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.659839417 +0000 UTC m=+145.625396040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.176766 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.204917 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.213354 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.221812 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.241997 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.249964 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-qk55l"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.253137 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.273697 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.274403 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.77438728 +0000 UTC m=+145.739943893 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.278571 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.280513 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-56p4s"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.374841 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.374972 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 15:30:26.874944849 +0000 UTC m=+145.840501452 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.375392 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.376096 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:26.876072342 +0000 UTC m=+145.841628945 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: W1003 15:30:26.408232 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb43aaaa2_f6bb_449b_90ce_d7324dd5a06d.slice/crio-3b0baa75de85b92b745d32d3fa7283920627b3c48ccc2234bfb1463227b38fe3 WatchSource:0}: Error finding container 3b0baa75de85b92b745d32d3fa7283920627b3c48ccc2234bfb1463227b38fe3: Status 404 returned error can't find the container with id 3b0baa75de85b92b745d32d3fa7283920627b3c48ccc2234bfb1463227b38fe3 Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.436913 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.454235 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.485972 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.486460 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 15:30:26.986436242 +0000 UTC m=+145.951992855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.589189 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-84jgh" podStartSLOduration=122.589157075 podStartE2EDuration="2m2.589157075s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:26.547683286 +0000 UTC m=+145.513239899" watchObservedRunningTime="2025-10-03 15:30:26.589157075 +0000 UTC m=+145.554713688" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.590727 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.591219 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.091193955 +0000 UTC m=+146.056750568 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.593509 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-clvgs"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.687129 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7v9g5" event={"ID":"f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68","Type":"ContainerStarted","Data":"dc7fc0307edf567f5af6b192ffc3d5a7282092f8d4b37ebc4d172ae3d7fbe03b"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.689829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qksdj" event={"ID":"72843d3c-0fcf-4436-9516-164af96ca830","Type":"ContainerStarted","Data":"8742ec240e5b1c700c7efcc7e9fe6ee1cdd92c980d69984a61bcb3b067de6daa"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.692941 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.693131 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.193099264 +0000 UTC m=+146.158655877 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.693642 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.694311 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.194299239 +0000 UTC m=+146.159855852 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.703210 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" event={"ID":"3c37d766-5afd-4f91-a1bc-ec39345c9a5b","Type":"ContainerStarted","Data":"0f342ec9ebd91da08c9132e37aa354fc8e3cfd2358a84ab0811484e58173cc11"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.703288 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" event={"ID":"3c37d766-5afd-4f91-a1bc-ec39345c9a5b","Type":"ContainerStarted","Data":"27ce864961fef70dbfc08e2ca43515a1e4f6e8fd490d2ecad82112e507b0bfbc"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.718464 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9"] Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.732102 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" event={"ID":"dbec332a-71ca-4cf0-9c06-9f95853911b7","Type":"ContainerStarted","Data":"56e533f42986b2841baece34639a058603f7a6955d98e30694f6b945068011c3"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.733429 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" event={"ID":"c418f3b3-41e5-4185-84f7-22f8dd9c5431","Type":"ContainerStarted","Data":"ed25ec0ce0aa23ccc2869c14a106c469e69e732c482d3779d36f00a912ca809a"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.735053 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hr5zj" event={"ID":"87f6080d-917b-4d20-a744-9fb3bad43a77","Type":"ContainerStarted","Data":"7e0b72764571169d6ef421b91c63fb9280939d74aa35344e8923e38e16041f0f"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.737269 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" event={"ID":"2dc6fe42-5c34-4d22-a348-d5dd521247c1","Type":"ContainerStarted","Data":"8d6c778b663e96057adc7311c1ee7c3ef1eb149dd91ccf6c423467f9ac077e9c"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.743287 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" event={"ID":"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d","Type":"ContainerStarted","Data":"3b0baa75de85b92b745d32d3fa7283920627b3c48ccc2234bfb1463227b38fe3"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.764723 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" event={"ID":"c9c368d9-2375-4cef-8476-ce97bf83ab77","Type":"ContainerStarted","Data":"b2f377f76fc234ac9ca2c4ad56a4c371c0c758dd19891880c67fa42adcae7eee"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.778397 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" event={"ID":"68d4e48c-5240-4e89-84cf-c837158cc6b8","Type":"ContainerStarted","Data":"5833a4b8cfa812661d8545a1a5abe05fd054eecbe79b5df1cd1147152d75ddc1"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.787226 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" event={"ID":"713bccb3-37e5-4795-8edf-ed3dd44bb275","Type":"ContainerStarted","Data":"13677945ade287653930957e6d9ad7b07cf82e0849986b6e74aba0c7eb84ba1f"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.796102 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" event={"ID":"8da083df-54d4-496a-81b6-52afe3a0c4e9","Type":"ContainerStarted","Data":"9f1b42f64060e472f029a9691947a16871db531be1041cb6a1b9ed5551d7dc81"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.798778 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.799007 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.298949688 +0000 UTC m=+146.264506301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.799072 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.800292 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.300281678 +0000 UTC m=+146.265838291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.843131 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" event={"ID":"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f","Type":"ContainerStarted","Data":"742b4ca049feda7fd561eb907909fe2a1406d000b59637f1aed3ed82f7ea12e5"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.852033 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" event={"ID":"7312c8b5-d029-4dad-99e8-d90247bb08a1","Type":"ContainerStarted","Data":"982f41f6f83f48415b69b29df582db39c33fc13bfd58f057ac632ec7520a488a"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.878539 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-48rh6" event={"ID":"e603b6d2-229f-4655-a40b-ae18d1fe0d5b","Type":"ContainerStarted","Data":"27646d70a4c781e6d6a0906e623cf4724b499d5c25560555e04f54a6068058a3"} Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.898738 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.899239 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.899904 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.900466 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.400076884 +0000 UTC m=+146.365633487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:26 crc kubenswrapper[5081]: I1003 15:30:26.900589 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:26 crc kubenswrapper[5081]: E1003 15:30:26.902744 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.402712642 +0000 UTC m=+146.368269465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.005710 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.505665912 +0000 UTC m=+146.471222525 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.008540 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.009419 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.012358 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.512339469 +0000 UTC m=+146.477896082 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.102992 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx"] Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.111015 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.111432 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.611416084 +0000 UTC m=+146.576972697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.146299 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-twgch"] Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.214806 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.215678 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.715664182 +0000 UTC m=+146.681220795 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.319144 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.319825 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.819797907 +0000 UTC m=+146.785354520 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.393969 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" podStartSLOduration=123.393950574 podStartE2EDuration="2m3.393950574s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:27.346590271 +0000 UTC m=+146.312146894" watchObservedRunningTime="2025-10-03 15:30:27.393950574 +0000 UTC m=+146.359507177" Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.423553 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.437384 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:27.936902986 +0000 UTC m=+146.902459599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.527738 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.528077 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.028059286 +0000 UTC m=+146.993615899 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.629182 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.630092 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.130074748 +0000 UTC m=+147.095631361 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.660049 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" podStartSLOduration=123.660028486 podStartE2EDuration="2m3.660028486s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:27.657745228 +0000 UTC m=+146.623301841" watchObservedRunningTime="2025-10-03 15:30:27.660028486 +0000 UTC m=+146.625585099" Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.734695 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.735074 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.235058668 +0000 UTC m=+147.200615271 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.773266 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj"] Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.836331 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.836803 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.336787622 +0000 UTC m=+147.302344235 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.901675 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5"] Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.912331 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn"] Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.938015 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:27 crc kubenswrapper[5081]: E1003 15:30:27.938415 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.438394081 +0000 UTC m=+147.403950694 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.945020 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-48rh6" podStartSLOduration=123.944989517 podStartE2EDuration="2m3.944989517s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:27.888168454 +0000 UTC m=+146.853725067" watchObservedRunningTime="2025-10-03 15:30:27.944989517 +0000 UTC m=+146.910546130" Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.950237 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" event={"ID":"dc15d380-65e8-493c-9e8d-2fbec9f058be","Type":"ContainerStarted","Data":"e55abc44906c45579ee615669609fa773af9b88eb88c7b6b9ce5dd0731c7fb14"} Oct 03 15:30:27 crc kubenswrapper[5081]: W1003 15:30:27.971215 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3f291d5_f475_4f1d_9291_897465bb6cd7.slice/crio-f2228544f7e98e46b2f4df62e65dd5a033bdf4507a42493fad085e8feeb53e92 WatchSource:0}: Error finding container f2228544f7e98e46b2f4df62e65dd5a033bdf4507a42493fad085e8feeb53e92: Status 404 returned error can't find the container with id f2228544f7e98e46b2f4df62e65dd5a033bdf4507a42493fad085e8feeb53e92 Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.984645 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" event={"ID":"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7","Type":"ContainerStarted","Data":"8fd58efb42a165ab6f7a7f96c06e0a169f1df5d8ca77045faf3abee4312e389c"} Oct 03 15:30:27 crc kubenswrapper[5081]: I1003 15:30:27.989586 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65" podStartSLOduration=123.989536806 podStartE2EDuration="2m3.989536806s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:27.975268354 +0000 UTC m=+146.940824987" watchObservedRunningTime="2025-10-03 15:30:27.989536806 +0000 UTC m=+146.955093419" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.011205 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" event={"ID":"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca","Type":"ContainerStarted","Data":"4f4a2add9c533357d2d09653fb82229c5793e5f63ce8e00b4413970b7a644e53"} Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.030630 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" event={"ID":"34fcb740-58cd-4b32-aae6-8381a956ab5d","Type":"ContainerStarted","Data":"34edddefb6346e4047074d26600dc5955a29b67bc9b39af68c889206df1a0a1a"} 
Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.037731 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" event={"ID":"185c2ecc-dbb0-4666-b3ff-72f099427ccc","Type":"ContainerStarted","Data":"6c5f7647965c7ac00dfcf8c07fb4a86b421cee38ad54306750859b38e57d02a0"} Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.040043 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.040520 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.540501676 +0000 UTC m=+147.506058289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.045040 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" event={"ID":"c418f3b3-41e5-4185-84f7-22f8dd9c5431","Type":"ContainerStarted","Data":"35e616c3f3866571889739d9a68ecf92e02aa15cfc063f58e194c7945ea84f40"} Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.046918 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.071371 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" podStartSLOduration=124.07134993 podStartE2EDuration="2m4.07134993s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:28.070107733 +0000 UTC m=+147.035664346" watchObservedRunningTime="2025-10-03 15:30:28.07134993 +0000 UTC m=+147.036906543" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.116485 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" podStartSLOduration=124.116458546 podStartE2EDuration="2m4.116458546s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:28.109632114 +0000 UTC m=+147.075188747" watchObservedRunningTime="2025-10-03 15:30:28.116458546 +0000 UTC m=+147.082015159" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.138455 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.140892 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.142307 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.143593 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.643554409 +0000 UTC m=+147.609111022 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.144198 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c8w4s" event={"ID":"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1","Type":"ContainerStarted","Data":"5c783d47f844582db2005abaccfd5d0b159e9a65fbe8a991ba6a7132c79b0812"} Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.151711 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qksdj" podStartSLOduration=124.15168768 podStartE2EDuration="2m4.15168768s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:28.151197575 +0000 UTC m=+147.116754208" watchObservedRunningTime="2025-10-03 15:30:28.15168768 +0000 UTC m=+147.117244303" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.158973 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" event={"ID":"f9ea2cb1-8cc0-48f9-8777-c65838994ea4","Type":"ContainerStarted","Data":"5afb7bbdb312d3f6856bb558a20f31bfbb19bc38795c18fe4de4f82afac382ad"} Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.164179 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.215830 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-48rh6" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.239054 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-smss4"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.245695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.252007 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.751985361 +0000 UTC m=+147.717541974 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.351640 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4csfn"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.352941 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.358220 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.858194707 +0000 UTC m=+147.823751320 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.363034 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.379045 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.416477 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qkzll"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.458505 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9ksc9"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.461246 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.461758 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:28.961741384 +0000 UTC m=+147.927297987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.571610 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.571967 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.071949849 +0000 UTC m=+148.037506462 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.577845 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.672946 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.673358 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.173341492 +0000 UTC m=+148.138898105 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.697283 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.716803 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.717591 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g"] Oct 03 15:30:28 crc kubenswrapper[5081]: W1003 15:30:28.725218 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06ab3f86_8279_44ee_a948_def749e60667.slice/crio-e1b7dc16f62678e93373305cf16bf0be6934bc04fb30b8332d370936668a0c94 WatchSource:0}: Error finding container e1b7dc16f62678e93373305cf16bf0be6934bc04fb30b8332d370936668a0c94: Status 404 returned error can't find the container with id e1b7dc16f62678e93373305cf16bf0be6934bc04fb30b8332d370936668a0c94 Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.745147 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.763014 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.765685 5081 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf"] Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.769313 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.769421 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.787699 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.791240 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.291202964 +0000 UTC m=+148.256759727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.896397 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.896920 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.396898585 +0000 UTC m=+148.362455198 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:28 crc kubenswrapper[5081]: I1003 15:30:28.999031 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:28 crc kubenswrapper[5081]: E1003 15:30:28.999542 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.499503414 +0000 UTC m=+148.465060027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:28.999794 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.000342 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.500320098 +0000 UTC m=+148.465876731 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.101512 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.101764 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.601700871 +0000 UTC m=+148.567257484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.102343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.102931 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.602917417 +0000 UTC m=+148.568474030 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.167262 5081 generic.go:334] "Generic (PLEG): container finished" podID="c9c368d9-2375-4cef-8476-ce97bf83ab77" containerID="1bf0e03d8586e58da3b1836f63460e0b64a1df566f95d7841a26313b1bf4e8ff" exitCode=0 Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.167381 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" event={"ID":"c9c368d9-2375-4cef-8476-ce97bf83ab77","Type":"ContainerDied","Data":"1bf0e03d8586e58da3b1836f63460e0b64a1df566f95d7841a26313b1bf4e8ff"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.171130 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" event={"ID":"48df3d15-6274-47b2-a109-9c9834e35563","Type":"ContainerStarted","Data":"bbe324c15d314ac01565e277b3224f9c7ae1831e4c7d22bb6b17492bd89ce052"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.175805 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9ksc9" event={"ID":"255659df-1859-49fe-8ced-0d05be9c5c4e","Type":"ContainerStarted","Data":"dc329e300741f23f36b3337c4a6e41c6930b823eb4594adaffa777c92c100d5a"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.178832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" event={"ID":"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7","Type":"ContainerStarted","Data":"d0a781a9f51b3ec81d00d75ea736c7a267852a08bae18a8ed719360dcc4d7a26"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.186228 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" event={"ID":"24a2b864-5a51-4283-8d44-578a6d40a6ce","Type":"ContainerStarted","Data":"9fd99c6a78d401a2b54ed5d1d022ed74e7558a24ceddec85898501ec1dffaa4d"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.188699 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" event={"ID":"34fcb740-58cd-4b32-aae6-8381a956ab5d","Type":"ContainerStarted","Data":"893ea1081e040b685a4f5962bb64019eeacbb9b71500cd4e075fca5517b6747d"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.203285 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.204687 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.704664061 +0000 UTC m=+148.670220674 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.213245 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" event={"ID":"7fe294d0-3c3c-4802-9a6c-5ccf7b2eb36f","Type":"ContainerStarted","Data":"a6ef2d25a4eb20437aac93fa88e571e42da717e1717ef032ca9a9b4dc3399ebe"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.223139 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" event={"ID":"7c45a715-d81c-4b98-a687-e42af0efee14","Type":"ContainerStarted","Data":"b2b113c5477d15c397eeaae55b58b34924a535d1cecb9cb137ec3b8481e95eff"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.233051 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" event={"ID":"2dc6fe42-5c34-4d22-a348-d5dd521247c1","Type":"ContainerStarted","Data":"0e3967e8fd18e31a153f0516eb8553629603aa816e94b4bccddfccd9211fcc4a"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.265485 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7v9g5" event={"ID":"f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68","Type":"ContainerStarted","Data":"65ca65f2aca60e8973e91004868f53fefbb7f3ce5b5f1dd7830dff52e9587662"} Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.266932 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7v9g5" Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.280412 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.280489 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.304882 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.305430 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.805411026 +0000 UTC m=+148.770967639 (durationBeforeRetry 500ms). 
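The block above is one full cycle of the failure that dominates this section: every 500ms the kubelet requeues MountVolume.MountDevice for the image-registry pod and UnmountVolume.TearDown for the deleted pod 8f668bae-612b-4b75-9490-919e737c6a3b, both failing because kubevirt.io.hostpath-provisioner is not in the kubelet's list of registered CSI drivers (typically the driver's node plugin has not yet re-registered after the restart). A minimal sketch for summarizing these retry storms from a saved copy of this journal; Python, and the kubelet.log path is an assumption, not something named in the log:

    import re
    from collections import defaultdict

    # Matches the nestedpendingoperations.go retry entries in this log, e.g.
    # E1003 15:30:28.673358 ... Operation for "{volumeName:... podName: ...}"
    # failed. No retries permitted until ... (durationBeforeRetry 500ms).
    # Error: MountVolume.MountDevice failed ...
    RETRY = re.compile(
        r'E\d{4} (?P<ts>\d{2}:\d{2}:\d{2}\.\d+).*?'
        r'volumeName:(?P<vol>\S+?) .*?'
        r'durationBeforeRetry (?P<delay>\w+)\)\. '
        r'Error: (?P<op>\w+\.\w+) failed'
    )

    def retry_summary(path="kubelet.log"):  # hypothetical path
        ops = defaultdict(list)
        with open(path, encoding="utf-8", errors="replace") as f:
            for line in f:
                m = RETRY.search(line)
                if m:
                    ops[(m["vol"], m["op"])].append((m["ts"], m["delay"]))
        for (vol, op), hits in sorted(ops.items()):
            print(f"{op}: {len(hits)} retries, last delay {hits[-1][1]} ({vol})")

    if __name__ == "__main__":
        retry_summary()

Run against this section it would report two operations against the same hostpath-provisioner volume, each retried roughly twice per second until the driver registers.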
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.317764 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" event={"ID":"7312c8b5-d029-4dad-99e8-d90247bb08a1","Type":"ContainerStarted","Data":"09e7b9a466df8d0c3d6032697c49dd15cfe4b9264c5dddd2b140ac640df1acb9"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.318916 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" event={"ID":"f9ea2cb1-8cc0-48f9-8777-c65838994ea4","Type":"ContainerStarted","Data":"9cefdeaf7ec648554e700373513da14645b9319677cf21f8a27c1019df06d836"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.319535 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" event={"ID":"b4b405fa-53ce-43ea-ba96-cf08df54171c","Type":"ContainerStarted","Data":"67330b627a00475b8f65d9d5ccf88b9d9472136fd1046de4695248afe3a298de"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.320460 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qksdj" event={"ID":"72843d3c-0fcf-4436-9516-164af96ca830","Type":"ContainerStarted","Data":"ede427dea1f3b270c771d435204868b42fd0e6bb1905df60f0fd9e81261d392b"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.336369 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-smss4" event={"ID":"124c4817-4bb4-49ab-b36c-16cb1f95f4d0","Type":"ContainerStarted","Data":"67459b8fcee8a7136e3203633edf66fb0d42371653056bbedfc7bd5a3e423bed"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.345709 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" event={"ID":"84647337-65f5-40fe-9ee7-62f1faebb04c","Type":"ContainerStarted","Data":"8c574b5c994e18f4d262cd8892ebeb25a85cf5c7461915dba73d869ba0399ee2"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.346431 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" event={"ID":"4992f05a-4ae9-4d48-a041-ed3d3f819b94","Type":"ContainerStarted","Data":"a19f4a24f89f653ea77bc6ac996cf032ca6ded3775644de0c990039dfb79c161"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.347019 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" event={"ID":"5624e6ea-f7c1-40bf-8463-45773373945d","Type":"ContainerStarted","Data":"eb647ec112db2a9303d060f0ab481e6cc9e6e87e5ad1a75103dc8456dffff05d"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.347644 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" event={"ID":"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c","Type":"ContainerStarted","Data":"52f060c52e67895ddfc2c77888b07237c6d06e9a9fab5e3b799804a0bebad4ab"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.348208 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" event={"ID":"31ab5383-1898-4964-8e8b-406b81b83fab","Type":"ContainerStarted","Data":"7df8465742cdf8b03806db6f7730fa0b04919afc47d2c596823d8e57a3ea26c1"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.354193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" event={"ID":"b3f291d5-f475-4f1d-9291-897465bb6cd7","Type":"ContainerStarted","Data":"f2228544f7e98e46b2f4df62e65dd5a033bdf4507a42493fad085e8feeb53e92"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.355942 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-56p4s" podStartSLOduration=125.355929042 podStartE2EDuration="2m5.355929042s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.285441004 +0000 UTC m=+148.250997637" watchObservedRunningTime="2025-10-03 15:30:29.355929042 +0000 UTC m=+148.321485655"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.372141 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" event={"ID":"7ae3b38e-2c09-4945-abb8-888051f4132a","Type":"ContainerStarted","Data":"b5041d61a1321d0a63695b35def4371789acfaaef4c9da685c98af02b7295f10"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.404665 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" event={"ID":"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d","Type":"ContainerStarted","Data":"6f12b9fb47a3ffcd8fd0241657cb85ad6a0e4fe6d63f05f435808340f21fc32f"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.407071 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdq6c" podStartSLOduration=125.407053927 podStartE2EDuration="2m5.407053927s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.359162298 +0000 UTC m=+148.324718911" watchObservedRunningTime="2025-10-03 15:30:29.407053927 +0000 UTC m=+148.372610540"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.408259 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.410195 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:29.910178039 +0000 UTC m=+148.875734652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.419039 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" event={"ID":"dbec332a-71ca-4cf0-9c06-9f95853911b7","Type":"ContainerStarted","Data":"75b6ebc38fccd14c196245c6e79e106c941590c80e0976fc3acd4a850a5f9872"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.440333 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" event={"ID":"ed7fcc9b-786d-48c0-89f8-86ee67e5b0ca","Type":"ContainerStarted","Data":"724cffc2431a8851af3a5d068bb347ef45d611e0bb6021f99261ce08f1e52d34"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.451049 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hr5zj" event={"ID":"87f6080d-917b-4d20-a744-9fb3bad43a77","Type":"ContainerStarted","Data":"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.463972 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7v9g5" podStartSLOduration=125.463942102 podStartE2EDuration="2m5.463942102s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.411069536 +0000 UTC m=+148.376626149" watchObservedRunningTime="2025-10-03 15:30:29.463942102 +0000 UTC m=+148.429498715"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.484628 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.484944 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.486861 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c8w4s" event={"ID":"972eae77-0b70-430b-bfd3-5a9b4b2ed7b1","Type":"ContainerStarted","Data":"eac308134c746fd9cd8a4780bd3d0ab8b8f9523c4584f0a5038204fc13d4e54a"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.509099 5081 generic.go:334] "Generic (PLEG): container finished" podID="dc15d380-65e8-493c-9e8d-2fbec9f058be" containerID="80b7d92de37beb34f610a9474b32740230f61a4ac2ba0bfde2b83b6cc8e89103" exitCode=0
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.510252 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" event={"ID":"dc15d380-65e8-493c-9e8d-2fbec9f058be","Type":"ContainerDied","Data":"80b7d92de37beb34f610a9474b32740230f61a4ac2ba0bfde2b83b6cc8e89103"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.512101 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.514277 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.014258452 +0000 UTC m=+148.979815065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.524752 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" podStartSLOduration=125.524726012 podStartE2EDuration="2m5.524726012s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.465299852 +0000 UTC m=+148.430856455" watchObservedRunningTime="2025-10-03 15:30:29.524726012 +0000 UTC m=+148.490282625"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.529204 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kcq4j" podStartSLOduration=125.529186335 podStartE2EDuration="2m5.529186335s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.522797795 +0000 UTC m=+148.488354408" watchObservedRunningTime="2025-10-03 15:30:29.529186335 +0000 UTC m=+148.494742958"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.538656 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4csfn" event={"ID":"4e5bd586-f1c6-4522-af69-ee046c3bbcdb","Type":"ContainerStarted","Data":"525ed8556809ae16446b524a4370a2e3a6e05c69cbca84893d84fcb0dfb72207"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.543093 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.612919 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.614023 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.114005907 +0000 UTC m=+149.079562520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.621644 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6lbmc" event={"ID":"8da083df-54d4-496a-81b6-52afe3a0c4e9","Type":"ContainerStarted","Data":"88a7d9db29481659a0c59f1cb98abf490269aaa22610f612c5d49645c66bf894"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.627288 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qcjc4" podStartSLOduration=125.62726063 podStartE2EDuration="2m5.62726063s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.581198485 +0000 UTC m=+148.546755098" watchObservedRunningTime="2025-10-03 15:30:29.62726063 +0000 UTC m=+148.592817243"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.637156 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" event={"ID":"06ab3f86-8279-44ee-a948-def749e60667","Type":"ContainerStarted","Data":"e1b7dc16f62678e93373305cf16bf0be6934bc04fb30b8332d370936668a0c94"}
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.654674 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-c8w4s" podStartSLOduration=7.654643721 podStartE2EDuration="7.654643721s" podCreationTimestamp="2025-10-03 15:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.648785527 +0000 UTC m=+148.614342150" watchObservedRunningTime="2025-10-03 15:30:29.654643721 +0000 UTC m=+148.620200344"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.656637 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-hr5zj" podStartSLOduration=125.656623199 podStartE2EDuration="2m5.656623199s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:29.631586288 +0000 UTC m=+148.597142891" watchObservedRunningTime="2025-10-03 15:30:29.656623199 +0000 UTC m=+148.622179822"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.672352 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7gf65"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.757291 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.771948 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 15:30:29 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld
Oct 03 15:30:29 crc kubenswrapper[5081]: [+]process-running ok
Oct 03 15:30:29 crc kubenswrapper[5081]: healthz check failed
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.772042 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.775681 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.275635845 +0000 UTC m=+149.241192458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.866554 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.867046 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.367026122 +0000 UTC m=+149.332582735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:29 crc kubenswrapper[5081]: I1003 15:30:29.968340 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:29 crc kubenswrapper[5081]: E1003 15:30:29.969015 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.468997943 +0000 UTC m=+149.434554556 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.071128 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.071760 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.571735576 +0000 UTC m=+149.537292189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.176828 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.177597 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.677582512 +0000 UTC m=+149.643139115 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.278055 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.279008 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.778961465 +0000 UTC m=+149.744518148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.380257 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.380799 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.88077675 +0000 UTC m=+149.846333353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.481956 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.482467 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.982426221 +0000 UTC m=+149.947982834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.482767 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.483237 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:30.983224574 +0000 UTC m=+149.948781187 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.584388 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.585330 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.085309458 +0000 UTC m=+150.050866071 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.650346 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.650408 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.686961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.687023 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.687058 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.687398 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.187382922 +0000 UTC m=+150.152939535 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.688979 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" event={"ID":"b3f291d5-f475-4f1d-9291-897465bb6cd7","Type":"ContainerStarted","Data":"65d7d64ddeb91d82df1e8883c0bf06cc01cfb0ef7df3dda9980f8479820110ea"}
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.690084 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.697438 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.707813 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" event={"ID":"4992f05a-4ae9-4d48-a041-ed3d3f819b94","Type":"ContainerStarted","Data":"a68e6da7bfde702530886d189b148f222daf16598034fe36a93bf6d8f7b29391"}
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.708604 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs"
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.716902 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" event={"ID":"a18d484e-c5e4-4460-8585-e4ccb6ec906c","Type":"ContainerStarted","Data":"4b7224a5cd30d71904bb6d722231dd5eb5ddf12dab898d688f5a7ee48d15cc37"}
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.716959 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" event={"ID":"a18d484e-c5e4-4460-8585-e4ccb6ec906c","Type":"ContainerStarted","Data":"88049db379cbb68dc1a4e2ff92708c60248af3e69bb5404fed12428be5498ba9"}
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.728213 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" event={"ID":"f9ea2cb1-8cc0-48f9-8777-c65838994ea4","Type":"ContainerStarted","Data":"3922442c6ecd3639374d9886db7066596d8fd246aa80b4adf8dd92954a70e5aa"}
Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.730197 5081 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-6k8gs container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:5443/healthz\": dial tcp 10.217.0.25:5443: connect: connection refused" start-of-body=
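The pod_startup_latency_tracker entries scattered through this section record the observed startup duration per pod; most operators report about 2m5s-2m6s only because they were created at 15:28:24, well before the node was ready, while machine-config-server (created at 15:30:22) shows a true startup of about 7.7s. A last sketch that extracts and ranks these durations (same assumed kubelet.log path):

    import re

    # Matches pod_startup_latency_tracker.go entries such as:
    # "Observed pod startup duration" pod="..." podStartSLOduration=125.355929042
    SLO = re.compile(
        r'"Observed pod startup duration" pod="(?P<pod>[^"]+)" '
        r'podStartSLOduration=(?P<secs>[\d.]+)'
    )

    def slowest_startups(path="kubelet.log", top=10):  # hypothetical knobs
        rows = []
        with open(path, encoding="utf-8", errors="replace") as f:
            for line in f:
                m = SLO.search(line)
                if m:
                    rows.append((float(m["secs"]), m["pod"]))
        for secs, pod in sorted(rows, reverse=True)[:top]:
            print(f"{secs:8.1f}s  {pod}")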
\"https://10.217.0.25:5443/healthz\": dial tcp 10.217.0.25:5443: connect: connection refused" start-of-body= Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.730239 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" podUID="4992f05a-4ae9-4d48-a041-ed3d3f819b94" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.25:5443/healthz\": dial tcp 10.217.0.25:5443: connect: connection refused" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.730663 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7z5mn" podStartSLOduration=126.730653384 podStartE2EDuration="2m6.730653384s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.729943283 +0000 UTC m=+149.695499896" watchObservedRunningTime="2025-10-03 15:30:30.730653384 +0000 UTC m=+149.696209997" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.750892 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" event={"ID":"7312c8b5-d029-4dad-99e8-d90247bb08a1","Type":"ContainerStarted","Data":"360a130eb783a069cc46bf5ed2d3994f1a06dfae6ed0d5821216e5b983944cdc"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.753312 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" event={"ID":"dc15d380-65e8-493c-9e8d-2fbec9f058be","Type":"ContainerStarted","Data":"64cbde26f2528720e31859ed74287e7caca0e2cdfd99ab85ae8d43fc615455f5"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.753806 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.769929 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:30 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:30 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:30 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.769996 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.770609 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-h96hj" event={"ID":"7ae3b38e-2c09-4945-abb8-888051f4132a","Type":"ContainerStarted","Data":"503107a36f0872934c6dcb1d35eeab2200b8cb36c0e4a059e06191146b0fedff"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.772265 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" podStartSLOduration=126.772252606 podStartE2EDuration="2m6.772252606s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.771121943 +0000 UTC m=+149.736678566" watchObservedRunningTime="2025-10-03 15:30:30.772252606 +0000 UTC m=+149.737809219" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.789233 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.789406 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.289372973 +0000 UTC m=+150.254929586 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.789440 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.789574 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.789756 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.790214 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.290199318 +0000 UTC m=+150.255755931 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.797683 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.805465 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" event={"ID":"3c37d766-5afd-4f91-a1bc-ec39345c9a5b","Type":"ContainerStarted","Data":"d40994dc256cac7b845eedfc1e99344baf2523775b91b0ee1f7b301cb0ea0201"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.808185 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.811928 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-c6dxw" podStartSLOduration=126.811899961 podStartE2EDuration="2m6.811899961s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.805029737 +0000 UTC m=+149.770586350" watchObservedRunningTime="2025-10-03 15:30:30.811899961 +0000 UTC m=+149.777456574" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.827629 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" event={"ID":"34b6bb9c-2eff-42e9-b9a6-4b3b8ebc3db7","Type":"ContainerStarted","Data":"b996ac5b9d42aea4698fff6a75f7cc79889f5f2a20ec82e284044ebc5f89afaf"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.833551 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" event={"ID":"06ab3f86-8279-44ee-a948-def749e60667","Type":"ContainerStarted","Data":"4e5d86e31cb8a5d952dabbb0af5c3d9b6a04be4b775d2eefa2098b8cdfeda8c3"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.842531 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx" podStartSLOduration=126.842510147 podStartE2EDuration="2m6.842510147s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.840948891 +0000 UTC m=+149.806505504" watchObservedRunningTime="2025-10-03 15:30:30.842510147 +0000 UTC m=+149.808066760" Oct 03 
15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.843878 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.846210 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" event={"ID":"34fcb740-58cd-4b32-aae6-8381a956ab5d","Type":"ContainerStarted","Data":"9c1b25ff19fabdd5641f00e885ecae93b2276048ad11fd141372eba410d409ed"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.849797 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.850827 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.869277 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-49td8" podStartSLOduration=126.86925264999999 podStartE2EDuration="2m6.86925265s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.867092556 +0000 UTC m=+149.832649169" watchObservedRunningTime="2025-10-03 15:30:30.86925265 +0000 UTC m=+149.834809263" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.885099 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" event={"ID":"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c","Type":"ContainerStarted","Data":"6d928d5105b54b1f949d37351d9c282b22a72fe9fd6d0da55e561c85c3cfceef"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.893994 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.894173 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.394151247 +0000 UTC m=+150.359707860 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.894360 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.895963 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.39595469 +0000 UTC m=+150.361511303 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.897129 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sjkfz" podStartSLOduration=126.897118255 podStartE2EDuration="2m6.897118255s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.89358776 +0000 UTC m=+149.859144373" watchObservedRunningTime="2025-10-03 15:30:30.897118255 +0000 UTC m=+149.862674868" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.910928 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" event={"ID":"68d4e48c-5240-4e89-84cf-c837158cc6b8","Type":"ContainerStarted","Data":"e0ba873654a3fd857b951f842c6ef85fc264808e401d6757e961c492130739ab"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.930719 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-twgch" podStartSLOduration=126.93069593 podStartE2EDuration="2m6.93069593s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.928928827 +0000 UTC m=+149.894485460" watchObservedRunningTime="2025-10-03 15:30:30.93069593 +0000 UTC m=+149.896252543" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.936384 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" 
event={"ID":"5624e6ea-f7c1-40bf-8463-45773373945d","Type":"ContainerStarted","Data":"7803e3dd346b9d68425f794970aaa6daec1314367da2139fa1e6b30c22bb25f5"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.937484 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.942475 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4csfn" event={"ID":"4e5bd586-f1c6-4522-af69-ee046c3bbcdb","Type":"ContainerStarted","Data":"be7ce75a8a537dee44f74a39070d359003f63d89bed28580dc4bdce60c5058f4"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.957578 5081 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-jrrbf container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.957671 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" podUID="5624e6ea-f7c1-40bf-8463-45773373945d" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.966879 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9" podStartSLOduration=126.966856411 podStartE2EDuration="2m6.966856411s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:30.965670006 +0000 UTC m=+149.931226639" watchObservedRunningTime="2025-10-03 15:30:30.966856411 +0000 UTC m=+149.932413024" Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.981758 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" event={"ID":"b43aaaa2-f6bb-449b-90ce-d7324dd5a06d","Type":"ContainerStarted","Data":"14dcb95a4a1ccc50be7fc75485385061e38879b7d78f2eacbe20d5126742382a"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.991844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" event={"ID":"c9c368d9-2375-4cef-8476-ce97bf83ab77","Type":"ContainerStarted","Data":"5c505b7acc8a9f7bedc46216a6fca48affe3b5ca41a0507e07867b15e38fb9c0"} Oct 03 15:30:30 crc kubenswrapper[5081]: I1003 15:30:30.995068 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:30 crc kubenswrapper[5081]: E1003 15:30:30.996058 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.496038035 +0000 UTC m=+150.461594648 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.009612 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" event={"ID":"48df3d15-6274-47b2-a109-9c9834e35563","Type":"ContainerStarted","Data":"968b3f37571b6584cdb313f5483c6861160192c38f9f60d67cd731bcbba7f8cf"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.010629 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.035842 5081 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-72wbm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.035903 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" podUID="48df3d15-6274-47b2-a109-9c9834e35563" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.040884 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-7npb2" podStartSLOduration=127.040870073 podStartE2EDuration="2m7.040870073s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.016138041 +0000 UTC m=+149.981694674" watchObservedRunningTime="2025-10-03 15:30:31.040870073 +0000 UTC m=+150.006426696" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.045423 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-qk55l" podStartSLOduration=127.045414768 podStartE2EDuration="2m7.045414768s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.041332197 +0000 UTC m=+150.006888840" watchObservedRunningTime="2025-10-03 15:30:31.045414768 +0000 UTC m=+150.010971381" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.053535 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.093783 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" podStartSLOduration=127.0937556 podStartE2EDuration="2m7.0937556s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.09173355 +0000 UTC m=+150.057290163" watchObservedRunningTime="2025-10-03 15:30:31.0937556 +0000 UTC m=+150.059312213" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.102500 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.102929 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.602913791 +0000 UTC m=+150.568470404 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.118640 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-smss4" event={"ID":"124c4817-4bb4-49ab-b36c-16cb1f95f4d0","Type":"ContainerStarted","Data":"2b79f1852ff4b28f5cec64ab816110f695bb5de51d2b274c9067314c774d6a3c"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.122691 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-4csfn" podStartSLOduration=9.122669756 podStartE2EDuration="9.122669756s" podCreationTimestamp="2025-10-03 15:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.119894294 +0000 UTC m=+150.085450917" watchObservedRunningTime="2025-10-03 15:30:31.122669756 +0000 UTC m=+150.088226369" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.128221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" event={"ID":"7c45a715-d81c-4b98-a687-e42af0efee14","Type":"ContainerStarted","Data":"7e83cc77e97ddcbb88f1d4d00788b06c19fa247da903ac59465dec1bdf2db03c"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.146186 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" event={"ID":"24a2b864-5a51-4283-8d44-578a6d40a6ce","Type":"ContainerStarted","Data":"d8cc2441878ec63c9755189a7a4fc6e113058f5c063937ca8f083e28a7584d2c"} 
Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.159194 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kf9l4" podStartSLOduration=127.159170778 podStartE2EDuration="2m7.159170778s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.15622119 +0000 UTC m=+150.121777803" watchObservedRunningTime="2025-10-03 15:30:31.159170778 +0000 UTC m=+150.124727391" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.164372 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" event={"ID":"185c2ecc-dbb0-4666-b3ff-72f099427ccc","Type":"ContainerStarted","Data":"9ee7d525a50d5beabe39086bfc2abd7d3ef50ee9998f4359478be6d79abd9bb9"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.174455 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" event={"ID":"31ab5383-1898-4964-8e8b-406b81b83fab","Type":"ContainerStarted","Data":"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.178260 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.179338 5081 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8p57q container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.179413 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.192698 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" event={"ID":"b4b405fa-53ce-43ea-ba96-cf08df54171c","Type":"ContainerStarted","Data":"471746111b736f30de63d53db78984f184bbf18458664ef001e5d4748b4a8daa"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.192748 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" event={"ID":"b4b405fa-53ce-43ea-ba96-cf08df54171c","Type":"ContainerStarted","Data":"2ba08fadae4fe9cae47a3390a2c369df0e256604e363eb57eaa7f03c04e66eba"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.192982 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" podStartSLOduration=127.192955458 podStartE2EDuration="2m7.192955458s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.188902728 +0000 UTC m=+150.154459361" watchObservedRunningTime="2025-10-03 15:30:31.192955458 +0000 UTC m=+150.158512081" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 
15:30:31.204413 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.205834 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.705808159 +0000 UTC m=+150.671364772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.223003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" event={"ID":"84647337-65f5-40fe-9ee7-62f1faebb04c","Type":"ContainerStarted","Data":"e9943fad6bd30f789f8b962b80889164f99ed607b309abf821132dff9e5a3cdf"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.238747 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" podStartSLOduration=127.238724534 podStartE2EDuration="2m7.238724534s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.235631093 +0000 UTC m=+150.201187706" watchObservedRunningTime="2025-10-03 15:30:31.238724534 +0000 UTC m=+150.204281147" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.258169 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9ksc9" event={"ID":"255659df-1859-49fe-8ced-0d05be9c5c4e","Type":"ContainerStarted","Data":"727b769f77f436c8d6a3f3ab6ed7629a3292d394ebc85429388af6eb24bc716e"} Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.267373 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.267439 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.308090 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.315971 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.815951982 +0000 UTC m=+150.781508595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.338075 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mpp4g" podStartSLOduration=127.338038326 podStartE2EDuration="2m7.338038326s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.281393418 +0000 UTC m=+150.246950041" watchObservedRunningTime="2025-10-03 15:30:31.338038326 +0000 UTC m=+150.303594939" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.339049 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-smss4" podStartSLOduration=127.339041566 podStartE2EDuration="2m7.339041566s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.329952226 +0000 UTC m=+150.295508849" watchObservedRunningTime="2025-10-03 15:30:31.339041566 +0000 UTC m=+150.304598189" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.372915 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" podStartSLOduration=31.372888248 podStartE2EDuration="31.372888248s" podCreationTimestamp="2025-10-03 15:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.372788735 +0000 UTC m=+150.338345358" watchObservedRunningTime="2025-10-03 15:30:31.372888248 +0000 UTC m=+150.338444861" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.411294 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.413002 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:31.912973826 +0000 UTC m=+150.878530639 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.414494 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lk9tv" podStartSLOduration=127.4144742 podStartE2EDuration="2m7.4144742s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.400092584 +0000 UTC m=+150.365649197" watchObservedRunningTime="2025-10-03 15:30:31.4144742 +0000 UTC m=+150.380030823" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.435815 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jkbhb" podStartSLOduration=127.435791312 podStartE2EDuration="2m7.435791312s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:31.435195244 +0000 UTC m=+150.400751867" watchObservedRunningTime="2025-10-03 15:30:31.435791312 +0000 UTC m=+150.401347925" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.519123 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.519888 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.019872872 +0000 UTC m=+150.985429475 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.620194 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.621015 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.120998438 +0000 UTC m=+151.086555051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.723511 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.723973 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.223948108 +0000 UTC m=+151.189504721 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.765707 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:31 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:31 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:31 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.765769 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.827623 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.829675 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.329648549 +0000 UTC m=+151.295205162 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:31 crc kubenswrapper[5081]: I1003 15:30:31.930423 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:31 crc kubenswrapper[5081]: E1003 15:30:31.930831 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.430816616 +0000 UTC m=+151.396373229 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.031579 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.032059 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.532032184 +0000 UTC m=+151.497588797 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.032326 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.032769 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.532753085 +0000 UTC m=+151.498309698 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.133416 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.133966 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.633920722 +0000 UTC m=+151.599477335 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.235172 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.235938 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.735921914 +0000 UTC m=+151.701478527 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.339313 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.339693 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.839672217 +0000 UTC m=+151.805228820 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.341304 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9ksc9" event={"ID":"255659df-1859-49fe-8ced-0d05be9c5c4e","Type":"ContainerStarted","Data":"48646114608d906f184acca61331822406c1243179bb09d3434dd27553332ad5"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.342543 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.357525 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" event={"ID":"a18d484e-c5e4-4460-8585-e4ccb6ec906c","Type":"ContainerStarted","Data":"4f24286d8dc4eed541b4a5acc439bd86784a3a7396053aa3d48908da89623d81"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.371099 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" event={"ID":"8bf68ff9-fd7d-4e0b-a406-dccfee09bb1c","Type":"ContainerStarted","Data":"6ffcb4a61d12aa235bcc9119106e4e840b0a3d0aa2f83c39bd1ee078351dc6de"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.374474 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9ksc9" podStartSLOduration=10.374450957 podStartE2EDuration="10.374450957s" podCreationTimestamp="2025-10-03 15:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:32.366033378 +0000 UTC m=+151.331590001" watchObservedRunningTime="2025-10-03 15:30:32.374450957 +0000 UTC m=+151.340007560" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.379052 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d7304ac5f6a61aca5ec3991634c1d032f92d96408a268fab21f8a3e6fdfded44"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.379104 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c0d8c0402472c9e2549a5c2a8cf1a925c54c9618eb58a5c9334f56c23145fca1"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.387374 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"98ffe70c3efaafb4bd0040944b62dd15361650b5dded9f34a86011be2b59db9c"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.394968 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b59f5" podStartSLOduration=128.394945864 podStartE2EDuration="2m8.394945864s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:32.391907534 +0000 UTC m=+151.357464157" watchObservedRunningTime="2025-10-03 15:30:32.394945864 +0000 UTC m=+151.360502477" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.401324 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" event={"ID":"c9c368d9-2375-4cef-8476-ce97bf83ab77","Type":"ContainerStarted","Data":"e0046e157246f865ad197181f8354675ca8d096f725db398d7d33d887305d3e7"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.412955 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e66b5393b8fcf42598de3c220e663a8b4128603421c1398344be254c7e96e67f"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.413010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d1e13792f5f0d4dd1aeaadca782bc765f3222b8775a6f16f6f790c9e4dbc24cb"} Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.413582 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.416712 5081 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8p57q container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.416770 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.416973 5081 patch_prober.go:28] 
interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.417024 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.427985 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-72wbm" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.434065 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jrrbf" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.445461 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.448058 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:32.948044437 +0000 UTC m=+151.913601050 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.514055 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qkzll" podStartSLOduration=128.514031602 podStartE2EDuration="2m8.514031602s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:32.490800164 +0000 UTC m=+151.456356777" watchObservedRunningTime="2025-10-03 15:30:32.514031602 +0000 UTC m=+151.479588215" Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.547365 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.549904 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.049875494 +0000 UTC m=+152.015432287 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.650009 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.650384 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.15037021 +0000 UTC m=+152.115926823 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.679607 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" podStartSLOduration=128.679585046 podStartE2EDuration="2m8.679585046s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:32.675107523 +0000 UTC m=+151.640664126" watchObservedRunningTime="2025-10-03 15:30:32.679585046 +0000 UTC m=+151.645141659"
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.752997 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.753782 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.253762793 +0000 UTC m=+152.219319406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.769399 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 15:30:32 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld
Oct 03 15:30:32 crc kubenswrapper[5081]: [+]process-running ok
Oct 03 15:30:32 crc kubenswrapper[5081]: healthz check failed
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.769496 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.860379 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.864379 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.364350179 +0000 UTC m=+152.329906792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.964782 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.965086 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.465031122 +0000 UTC m=+152.430587725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:32 crc kubenswrapper[5081]: I1003 15:30:32.965213 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:32 crc kubenswrapper[5081]: E1003 15:30:32.965934 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.465925628 +0000 UTC m=+152.431482241 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.067203 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.067746 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.567721264 +0000 UTC m=+152.533277877 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.169529 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.170123 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.670096917 +0000 UTC m=+152.635653710 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.218008 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.218828 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.230722 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.230969 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.237738 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.271153 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.271329 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.771303635 +0000 UTC m=+152.736860248 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.271460 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.271836 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.77182877 +0000 UTC m=+152.737385383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.373001 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.373664 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.373709 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.373955 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.873934315 +0000 UTC m=+152.839490928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.415045 5081 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-6k8gs container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.415110 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs" podUID="4992f05a-4ae9-4d48-a041-ed3d3f819b94" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.433440 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f3325149e088fc3fdc5849061ec8421907ef701f78d838ae1a12505e5ae93d27"}
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.436986 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" event={"ID":"185c2ecc-dbb0-4666-b3ff-72f099427ccc","Type":"ContainerStarted","Data":"155adbecc245d4df40376157bb33477d79a2e6251a044bd6043fbac119f90c41"}
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.475866 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.475926 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.475961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.476324 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:33.976310248 +0000 UTC m=+152.941866861 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
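[Annotation] The repeating MountDevice/TearDown failures above all have the same cause: the kubelet resolves CSI operations against its in-memory list of registered drivers, and kubevirt.io.hostpath-provisioner has not registered yet (its csi-hostpathplugin pod is still starting). Each failed operation is re-queued by nestedpendingoperations with the base backoff visible as durationBeforeRetry 500ms. The Go sketch below is illustrative only, not kubelet source; the registry type and function names are invented to show why every lookup fails until the driver registers.

```go
// Illustrative sketch (hypothetical names, not kubelet code): a driver
// registry whose lookups fail until the plugin registers, retried on a
// fixed 500ms delay like the durationBeforeRetry in the records above.
package main

import (
	"fmt"
	"sync"
	"time"
)

type csiDriverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> node socket endpoint
}

func (r *csiDriverRegistry) lookup(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func (r *csiDriverRegistry) register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = endpoint
}

func main() {
	reg := &csiDriverRegistry{drivers: map[string]string{}}

	// Simulate the driver pod finishing startup and registering ~2s later.
	go func() {
		time.Sleep(2 * time.Second)
		reg.register("kubevirt.io.hostpath-provisioner", "/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	}()

	const durationBeforeRetry = 500 * time.Millisecond
	for {
		ep, err := reg.lookup("kubevirt.io.hostpath-provisioner")
		if err != nil {
			fmt.Printf("MountDevice failed: %v; retrying in %v\n", err, durationBeforeRetry)
			time.Sleep(durationBeforeRetry)
			continue
		}
		fmt.Println("MountDevice can proceed via endpoint", ep)
		return
	}
}
```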
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.476496 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.541337 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.577076 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.578054 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.07800947 +0000 UTC m=+153.043566083 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.624553 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.625852 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.630974 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.679745 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.680233 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.180214858 +0000 UTC m=+153.145771471 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.693919 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.768123 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 15:30:33 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld
Oct 03 15:30:33 crc kubenswrapper[5081]: [+]process-running ok
Oct 03 15:30:33 crc kubenswrapper[5081]: healthz check failed
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.768716 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.780730 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.781083 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nllj\" (UniqueName: \"kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.781144 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.781173 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.781310 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.281286852 +0000 UTC m=+153.246843465 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.803320 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-phpqk"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.804744 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.807903 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.820843 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phpqk"]
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.833805 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.850789 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5jvtx"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.882605 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.882695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.882930 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nllj\" (UniqueName: \"kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883053 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.883078 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.383058647 +0000 UTC m=+153.348615450 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
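[Annotation] The interleaved reconciler_common records trace kubelet's volume-manager reconcile loop per volume: VerifyControllerAttachedVolume, then MountVolume (device mount plus per-pod SetUp), with empty-dir, projected, and host-path volumes succeeding immediately while the CSI PVC keeps failing. A minimal sketch of that desired-state-vs-actual-state pattern follows; the types and keys are invented for illustration and do not mirror kubelet's actual structures.

```go
// Minimal sketch (assumed structure, not kubelet code) of the reconcile
// pattern behind the reconciler_common.go records: diff desired volumes
// against actually-mounted ones and start Mount/Unmount operations.
package main

import "fmt"

type volume struct {
	name string
	pod  string
}

func reconcile(desired, actual map[string]volume) {
	// Needed but not mounted yet -> "operationExecutor.MountVolume started".
	for key, v := range desired {
		if _, mounted := actual[key]; !mounted {
			fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.name, v.pod)
		}
	}
	// Still mounted for a pod that is gone -> "operationExecutor.UnmountVolume started".
	for key, v := range actual {
		if _, needed := desired[key]; !needed {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.name, v.pod)
		}
	}
}

func main() {
	desired := map[string]volume{
		"utilities/certified-operators-6pt7z": {"utilities", "certified-operators-6pt7z"},
		"pvc-657094db/image-registry":         {"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8", "image-registry-697d97f7c8-vk4jz"},
	}
	actual := map[string]volume{
		"pvc-657094db/8f668bae": {"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8", "8f668bae-612b-4b75-9490-919e737c6a3b"},
	}
	reconcile(desired, actual)
}
```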
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883114 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8qdd\" (UniqueName: \"kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883144 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883181 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883815 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.883859 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.885332 5081 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.904216 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6k8gs"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.920526 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nllj\" (UniqueName: \"kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj\") pod \"certified-operators-6pt7z\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.951590 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.984277 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.984655 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.984728 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.484684376 +0000 UTC m=+153.450240999 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.984807 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.985074 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8qdd\" (UniqueName: \"kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.985165 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: E1003 15:30:33.985982 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.485953534 +0000 UTC m=+153.451510327 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.988781 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:33 crc kubenswrapper[5081]: I1003 15:30:33.989070 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.028365 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.032055 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.035116 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8qdd\" (UniqueName: \"kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd\") pod \"community-operators-phpqk\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.055087 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.086444 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:34 crc kubenswrapper[5081]: E1003 15:30:34.086777 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.586729639 +0000 UTC m=+153.552286262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.087418 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:34 crc kubenswrapper[5081]: E1003 15:30:34.087882 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 15:30:34.587859882 +0000 UTC m=+153.553416495 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vk4jz" (UID: "86928421-ee7e-4823-9483-80d3d4855283") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.111712 5081 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-03T15:30:33.885365355Z","Handler":null,"Name":""}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.120883 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phpqk"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.165146 5081 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.165198 5081 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.188983 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.189361 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.189457 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zczx5\" (UniqueName: \"kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.189523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.201100 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.214683 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5htzp"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.218136 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5htzp"
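[Annotation] The plugin_watcher, RegisterPlugin, and csi_plugin records above are the registration handshake that unblocks the stuck volume: the driver's node registrar creates kubevirt.io.hostpath-provisioner-reg.sock under /var/lib/kubelet/plugins_registry/, kubelet's plugin watcher picks it up, validates the driver name, endpoint, and supported versions, and adds it to the registry; the pending TearDown for pod 8f668bae then succeeds. Kubelet's watcher is fsnotify-based and speaks a gRPC Registration service over that socket; the polling sketch below only illustrates the directory-watching idea, with invented function names.

```go
// Simplified polling sketch (not kubelet's fsnotify/gRPC implementation):
// notice "*-reg.sock" files appearing under the plugin registry directory
// and record the corresponding driver as registered.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
)

func watchRegistry(dir string, registered map[string]bool) {
	for {
		entries, err := os.ReadDir(dir)
		if err == nil {
			for _, e := range entries {
				name := e.Name()
				if strings.HasSuffix(name, "-reg.sock") && !registered[name] {
					registered[name] = true
					driver := strings.TrimSuffix(name, "-reg.sock")
					fmt.Printf("Register new plugin with name: %s at endpoint: %s\n",
						driver, filepath.Join(dir, name))
				}
			}
		}
		time.Sleep(time.Second)
	}
}

func main() {
	// Path matches the log; point at a scratch directory to experiment.
	watchRegistry("/var/lib/kubelet/plugins_registry", map[string]bool{})
}
```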
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.235070 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5htzp"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291252 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lswbr\" (UniqueName: \"kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291314 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291380 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291442 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291486 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.291621 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zczx5\" (UniqueName: \"kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.293356 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.293678 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.300056 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.319487 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zczx5\" (UniqueName: \"kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5\") pod \"certified-operators-9j2m7\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " pod="openshift-marketplace/certified-operators-9j2m7"
Oct 03 15:30:34 crc kubenswrapper[5081]: W1003 15:30:34.319532 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5fac9b5_842e_4913_8c2f_6cfb992f3e1d.slice/crio-0453823266cdf3b5ca947a6fc24ef842e3817bdceb1a475c58962427100bec93 WatchSource:0}: Error finding container 0453823266cdf3b5ca947a6fc24ef842e3817bdceb1a475c58962427100bec93: Status 404 returned error can't find the container with id 0453823266cdf3b5ca947a6fc24ef842e3817bdceb1a475c58962427100bec93
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.341403 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 03 15:30:34 crc kubenswrapper[5081]: W1003 15:30:34.347821 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poddcc78925_9fb6_4e69_a028_9f876ac04fbb.slice/crio-9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4 WatchSource:0}: Error finding container 9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4: Status 404 returned error can't find the container with id 9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.356622 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.356680 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.385323 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9j2m7"
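[Annotation] With the driver registered, the first retried MountDevice goes through immediately, and the csi_attacher record shows why it is trivial here: this driver does not advertise the CSI STAGE_UNSTAGE_VOLUME node capability, so kubelet skips the NodeStageVolume step and marks MountDevice succeeded, leaving the per-pod SetUp to call NodePublishVolume directly. A hedged sketch of that capability gate follows; the function names are illustrative stand-ins, only the capability name comes from the CSI spec.

```go
// Sketch of the capability gate behind "STAGE_UNSTAGE_VOLUME capability not
// set. Skipping MountDevice...": without the staging capability, device
// mount is a no-op success and publishing happens later during SetUp.
package main

import "fmt"

type nodeCapability string

const capStageUnstage nodeCapability = "STAGE_UNSTAGE_VOLUME"

func mountDevice(caps map[nodeCapability]bool, volume string) {
	if !caps[capStageUnstage] {
		// Treated as success; SetUp will call NodePublishVolume directly.
		fmt.Printf("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice for %s\n", volume)
		return
	}
	fmt.Printf("calling NodeStageVolume for %s\n", volume)
}

func main() {
	hostpathCaps := map[nodeCapability]bool{} // hostpath driver: no staging
	mountDevice(hostpathCaps, "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8")
}
```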
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.394246 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.395247 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lswbr\" (UniqueName: \"kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.395285 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.395752 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.395054 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.417877 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lswbr\" (UniqueName: \"kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr\") pod \"community-operators-5htzp\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.445721 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vk4jz\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.467084 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" event={"ID":"185c2ecc-dbb0-4666-b3ff-72f099427ccc","Type":"ContainerStarted","Data":"1c6edfe8b01b41cd053703e00e0769d4327a3b33f94d8080c18d7e2b68808825"}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.467157 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" event={"ID":"185c2ecc-dbb0-4666-b3ff-72f099427ccc","Type":"ContainerStarted","Data":"a9b5527b6ca435a49a6deb3b99fc5636f2885b78692b0abd3c3df9ef1855bd9d"}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.472535 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerStarted","Data":"0453823266cdf3b5ca947a6fc24ef842e3817bdceb1a475c58962427100bec93"}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.480031 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcc78925-9fb6-4e69-a028-9f876ac04fbb","Type":"ContainerStarted","Data":"9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4"}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.492929 5081 generic.go:334] "Generic (PLEG): container finished" podID="24a2b864-5a51-4283-8d44-578a6d40a6ce" containerID="d8cc2441878ec63c9755189a7a4fc6e113058f5c063937ca8f083e28a7584d2c" exitCode=0
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.494523 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" event={"ID":"24a2b864-5a51-4283-8d44-578a6d40a6ce","Type":"ContainerDied","Data":"d8cc2441878ec63c9755189a7a4fc6e113058f5c063937ca8f083e28a7584d2c"}
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.499409 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-clvgs" podStartSLOduration=12.499388863 podStartE2EDuration="12.499388863s" podCreationTimestamp="2025-10-03 15:30:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:34.497466466 +0000 UTC m=+153.463023079" watchObservedRunningTime="2025-10-03 15:30:34.499388863 +0000 UTC m=+153.464945476"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.547945 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5htzp"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.555844 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.772950 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 15:30:34 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld
Oct 03 15:30:34 crc kubenswrapper[5081]: [+]process-running ok
Oct 03 15:30:34 crc kubenswrapper[5081]: healthz check failed
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.773032 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.796839 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phpqk"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.806708 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"]
Oct 03 15:30:34 crc kubenswrapper[5081]: I1003 15:30:34.935236 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5htzp"]
Oct 03 15:30:34 crc kubenswrapper[5081]: W1003 15:30:34.949406 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb975a9ae_a790_4a4b_b3da_0e21ab595446.slice/crio-2947cd1a707fc5a466d622da535b5914bed266e6b6b51950624668bf2e660c26 WatchSource:0}: Error finding container 2947cd1a707fc5a466d622da535b5914bed266e6b6b51950624668bf2e660c26: Status 404 returned error can't find the container with id 2947cd1a707fc5a466d622da535b5914bed266e6b6b51950624668bf2e660c26
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.019174 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"]
Oct 03 15:30:35 crc kubenswrapper[5081]: W1003 15:30:35.022411 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86928421_ee7e_4823_9483_80d3d4855283.slice/crio-decf891f83bb3a297b4367d1816602bd57122541cc4be181ccf5af1c21988167 WatchSource:0}: Error finding container decf891f83bb3a297b4367d1816602bd57122541cc4be181ccf5af1c21988167: Status 404 returned error can't find the container with id decf891f83bb3a297b4367d1816602bd57122541cc4be181ccf5af1c21988167
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.332235 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv"
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.332296 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv"
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341262 5081 patch_prober.go:28] interesting pod/apiserver-76f77b778f-pz8gv container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]log ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]etcd ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/start-apiserver-admission-initializer ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/generic-apiserver-start-informers ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/max-in-flight-filter ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/storage-object-count-tracker-hook ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Oct 03 15:30:35 crc kubenswrapper[5081]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/project.openshift.io-projectcache ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/openshift.io-startinformers ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/openshift.io-restmapperupdater ok
Oct 03 15:30:35 crc kubenswrapper[5081]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Oct 03 15:30:35 crc kubenswrapper[5081]: livez check failed
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341334 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" podUID="c9c368d9-2375-4cef-8476-ce97bf83ab77" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341891 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341901 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341961 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.341963 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.501170 5081 generic.go:334] "Generic (PLEG): container finished" podID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerID="5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb" exitCode=0
Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.501288 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerDied","Data":"5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb"}
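[Annotation] The patch_prober/prober records are kubelet HTTP probes: a GET against the container's health endpoint where any status code of 400 or above (the router's and apiserver's 500s, with the failing sub-checks echoed as start-of-body) or a transport error (the console's and downloads' "connection refused") counts as a failure. A minimal sketch of such a probe, with an illustrative URL and timeout:

```go
// Minimal sketch of an HTTP health probe like the failing ones above:
// GET with a short timeout; status >= 400 or a transport error fails.
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func probe(url string) {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Printf("Probe failed: %v\n", err) // e.g. "connect: connection refused"
		return
	}
	defer resp.Body.Close()
	// Capture only the start of the body, as the prober log lines do.
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 256))
	if resp.StatusCode >= 400 {
		fmt.Printf("Probe failed: HTTP probe failed with statuscode: %d start-of-body=%s\n",
			resp.StatusCode, body)
		return
	}
	fmt.Println("Probe succeeded")
}

func main() {
	probe("http://10.217.0.9:8080/") // downloads endpoint from the log
}
```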
kubenswrapper[5081]: I1003 15:30:35.503552 5081 generic.go:334] "Generic (PLEG): container finished" podID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerID="5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140" exitCode=0 Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.503708 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerDied","Data":"5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.503813 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerStarted","Data":"eeea7672ce92136aa860e367d74548ea9223ecb2247763d647d35ee6879d8418"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.505670 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.506014 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcc78925-9fb6-4e69-a028-9f876ac04fbb","Type":"ContainerStarted","Data":"f2ded815e4038e3fdba29630d1d4dd0a7ddd9e49bb3b0a356f98e2dc875600c7"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.508149 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" event={"ID":"86928421-ee7e-4823-9483-80d3d4855283","Type":"ContainerStarted","Data":"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.508192 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" event={"ID":"86928421-ee7e-4823-9483-80d3d4855283","Type":"ContainerStarted","Data":"decf891f83bb3a297b4367d1816602bd57122541cc4be181ccf5af1c21988167"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.508709 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.513280 5081 generic.go:334] "Generic (PLEG): container finished" podID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerID="b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0" exitCode=0 Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.513340 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerDied","Data":"b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.513359 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerStarted","Data":"2947cd1a707fc5a466d622da535b5914bed266e6b6b51950624668bf2e660c26"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.517479 5081 generic.go:334] "Generic (PLEG): container finished" podID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerID="20df0b8d991fba32685a3237eb68af4ce0c627ee63a239e2e1f93bc2bcda106d" exitCode=0 Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.517631 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-phpqk" event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerDied","Data":"20df0b8d991fba32685a3237eb68af4ce0c627ee63a239e2e1f93bc2bcda106d"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.517680 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phpqk" event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerStarted","Data":"40522bce61e7fc6947a29641c1eaf38b60f4ed7feda9994a2e550eea24c3c538"} Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.547283 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.547256363 podStartE2EDuration="2.547256363s" podCreationTimestamp="2025-10-03 15:30:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:35.547214982 +0000 UTC m=+154.512771605" watchObservedRunningTime="2025-10-03 15:30:35.547256363 +0000 UTC m=+154.512812976" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.623667 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" podStartSLOduration=131.623642406 podStartE2EDuration="2m11.623642406s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:35.623219013 +0000 UTC m=+154.588775626" watchObservedRunningTime="2025-10-03 15:30:35.623642406 +0000 UTC m=+154.589199019" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.687257 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.687380 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.700991 5081 patch_prober.go:28] interesting pod/console-f9d7485db-hr5zj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.701073 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hr5zj" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.760129 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qksdj" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.764471 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:35 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:35 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:35 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.764574 5081 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.791827 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.806931 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:30:35 crc kubenswrapper[5081]: E1003 15:30:35.807223 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24a2b864-5a51-4283-8d44-578a6d40a6ce" containerName="collect-profiles" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.807239 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="24a2b864-5a51-4283-8d44-578a6d40a6ce" containerName="collect-profiles" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.807366 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="24a2b864-5a51-4283-8d44-578a6d40a6ce" containerName="collect-profiles" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.808241 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.810325 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.826931 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.847604 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.928226 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6l2z\" (UniqueName: \"kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z\") pod \"24a2b864-5a51-4283-8d44-578a6d40a6ce\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.928368 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume\") pod \"24a2b864-5a51-4283-8d44-578a6d40a6ce\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.928433 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume\") pod \"24a2b864-5a51-4283-8d44-578a6d40a6ce\" (UID: \"24a2b864-5a51-4283-8d44-578a6d40a6ce\") " Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.928653 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwklt\" (UniqueName: \"kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 
15:30:35.928827 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.928905 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.930207 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume" (OuterVolumeSpecName: "config-volume") pod "24a2b864-5a51-4283-8d44-578a6d40a6ce" (UID: "24a2b864-5a51-4283-8d44-578a6d40a6ce"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.936402 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "24a2b864-5a51-4283-8d44-578a6d40a6ce" (UID: "24a2b864-5a51-4283-8d44-578a6d40a6ce"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:30:35 crc kubenswrapper[5081]: I1003 15:30:35.937539 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z" (OuterVolumeSpecName: "kube-api-access-d6l2z") pod "24a2b864-5a51-4283-8d44-578a6d40a6ce" (UID: "24a2b864-5a51-4283-8d44-578a6d40a6ce"). InnerVolumeSpecName "kube-api-access-d6l2z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.030294 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwklt\" (UniqueName: \"kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.030846 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.030909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.030976 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24a2b864-5a51-4283-8d44-578a6d40a6ce-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.030995 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24a2b864-5a51-4283-8d44-578a6d40a6ce-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.031008 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6l2z\" (UniqueName: \"kubernetes.io/projected/24a2b864-5a51-4283-8d44-578a6d40a6ce-kube-api-access-d6l2z\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.031416 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.031432 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.047778 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwklt\" (UniqueName: \"kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt\") pod \"redhat-marketplace-fg69b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.126372 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.215143 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.216302 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.227112 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.259005 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.343587 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.343703 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9b5k\" (UniqueName: \"kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.343738 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.355479 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:30:36 crc kubenswrapper[5081]: W1003 15:30:36.366711 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43899581_01e6_4eda_87cd_33f5b3d6879b.slice/crio-a27592e9d669ebdd06bbb5177d4de7e2e6e6e44cb438c072fabcb49f901c4ed7 WatchSource:0}: Error finding container a27592e9d669ebdd06bbb5177d4de7e2e6e6e44cb438c072fabcb49f901c4ed7: Status 404 returned error can't find the container with id a27592e9d669ebdd06bbb5177d4de7e2e6e6e44cb438c072fabcb49f901c4ed7 Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.445359 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.445472 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9b5k\" (UniqueName: \"kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " 
pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.445500 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.446291 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.446361 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.475381 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9b5k\" (UniqueName: \"kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k\") pod \"redhat-marketplace-jrrss\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.549047 5081 generic.go:334] "Generic (PLEG): container finished" podID="dcc78925-9fb6-4e69-a028-9f876ac04fbb" containerID="f2ded815e4038e3fdba29630d1d4dd0a7ddd9e49bb3b0a356f98e2dc875600c7" exitCode=0 Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.549297 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcc78925-9fb6-4e69-a028-9f876ac04fbb","Type":"ContainerDied","Data":"f2ded815e4038e3fdba29630d1d4dd0a7ddd9e49bb3b0a356f98e2dc875600c7"} Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.559103 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerStarted","Data":"a27592e9d669ebdd06bbb5177d4de7e2e6e6e44cb438c072fabcb49f901c4ed7"} Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.562753 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.587852 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.588246 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r" event={"ID":"24a2b864-5a51-4283-8d44-578a6d40a6ce","Type":"ContainerDied","Data":"9fd99c6a78d401a2b54ed5d1d022ed74e7558a24ceddec85898501ec1dffaa4d"} Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.588285 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fd99c6a78d401a2b54ed5d1d022ed74e7558a24ceddec85898501ec1dffaa4d" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.770960 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:36 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:36 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:36 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.771094 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.807777 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.808970 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.811783 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.830854 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.894622 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.895633 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.899020 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.905477 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.907920 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.919944 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:30:36 crc kubenswrapper[5081]: W1003 15:30:36.955271 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode15fe7f8_7909_4553_97d1_17417fc0b111.slice/crio-bbf677e278232ffd6130831b6888dfcc5d0d9d659026ee55198ffdde3524d0a8 WatchSource:0}: Error finding container bbf677e278232ffd6130831b6888dfcc5d0d9d659026ee55198ffdde3524d0a8: Status 404 returned error can't find the container with id bbf677e278232ffd6130831b6888dfcc5d0d9d659026ee55198ffdde3524d0a8 Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.957309 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.957445 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:36 crc kubenswrapper[5081]: I1003 15:30:36.957531 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzps8\" (UniqueName: \"kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.058897 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.058961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.058999 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzps8\" (UniqueName: 
\"kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.059027 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.059054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.059663 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.059832 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.092510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzps8\" (UniqueName: \"kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8\") pod \"redhat-operators-b4b85\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.150956 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.161127 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.161191 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.161374 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.181715 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.216378 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.218258 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.218391 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.224428 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.365497 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqc4b\" (UniqueName: \"kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.366041 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.366188 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.466675 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.467941 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqc4b\" (UniqueName: \"kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.468479 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.468511 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.469453 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.469752 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: W1003 15:30:37.484650 5081 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-pod6a9fa8c4_0595_48b8_99c8_4ddda023045b.slice/crio-77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860 WatchSource:0}: Error finding container 77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860: Status 404 returned error can't find the container with id 77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860 Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.495017 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqc4b\" (UniqueName: \"kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b\") pod \"redhat-operators-bcl7h\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.531310 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.538039 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.653269 5081 generic.go:334] "Generic (PLEG): container finished" podID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerID="33a550ecbb68dd5d7a29385a58ffe4dd48e35fc96108974bbd5559503063f3f5" exitCode=0 Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.653351 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerDied","Data":"33a550ecbb68dd5d7a29385a58ffe4dd48e35fc96108974bbd5559503063f3f5"} Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.657897 5081 generic.go:334] "Generic (PLEG): container finished" podID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerID="0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d" exitCode=0 Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.658027 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerDied","Data":"0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d"} Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.658063 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerStarted","Data":"bbf677e278232ffd6130831b6888dfcc5d0d9d659026ee55198ffdde3524d0a8"} Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.659891 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6a9fa8c4-0595-48b8-99c8-4ddda023045b","Type":"ContainerStarted","Data":"77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860"} Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.662495 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerStarted","Data":"61b1fb73fd73cbd99ee880f8152de16008334afdb54803b55390a0fdd55c9911"} Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.753266 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.764395 5081 patch_prober.go:28] interesting 
pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:37 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:37 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:37 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:37 crc kubenswrapper[5081]: I1003 15:30:37.764759 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.013574 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.084634 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir\") pod \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.084733 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access\") pod \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\" (UID: \"dcc78925-9fb6-4e69-a028-9f876ac04fbb\") " Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.084843 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dcc78925-9fb6-4e69-a028-9f876ac04fbb" (UID: "dcc78925-9fb6-4e69-a028-9f876ac04fbb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.085040 5081 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.105776 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dcc78925-9fb6-4e69-a028-9f876ac04fbb" (UID: "dcc78925-9fb6-4e69-a028-9f876ac04fbb"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.140004 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:30:38 crc kubenswrapper[5081]: W1003 15:30:38.156168 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode534f1b5_437a_4e9a_9f68_247a5b1fad20.slice/crio-13ff296b9f4c99a173a16e18616292d0d457a8d86cd94cbc07e7b902f878dd10 WatchSource:0}: Error finding container 13ff296b9f4c99a173a16e18616292d0d457a8d86cd94cbc07e7b902f878dd10: Status 404 returned error can't find the container with id 13ff296b9f4c99a173a16e18616292d0d457a8d86cd94cbc07e7b902f878dd10 Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.189248 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcc78925-9fb6-4e69-a028-9f876ac04fbb-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.672857 5081 generic.go:334] "Generic (PLEG): container finished" podID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerID="9161623a6b2b106df6b3b9a6270401910d6e30631331b0f7b11e833d4a2c6c4a" exitCode=0 Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.673036 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerDied","Data":"9161623a6b2b106df6b3b9a6270401910d6e30631331b0f7b11e833d4a2c6c4a"} Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.678906 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcc78925-9fb6-4e69-a028-9f876ac04fbb","Type":"ContainerDied","Data":"9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4"} Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.678950 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f47570d65f1c3f503ae068dec5ee84ba770b3a43287f68912be55f2f7b61dc4" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.679010 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.684498 5081 generic.go:334] "Generic (PLEG): container finished" podID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerID="26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e" exitCode=0 Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.684648 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerDied","Data":"26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e"} Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.684684 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerStarted","Data":"13ff296b9f4c99a173a16e18616292d0d457a8d86cd94cbc07e7b902f878dd10"} Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.688656 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6a9fa8c4-0595-48b8-99c8-4ddda023045b","Type":"ContainerStarted","Data":"037165b21a95c5f957ace12c75563dcac490306397ba742d9bfde163194523f3"} Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.750327 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.750304034 podStartE2EDuration="2.750304034s" podCreationTimestamp="2025-10-03 15:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:30:38.74679354 +0000 UTC m=+157.712350153" watchObservedRunningTime="2025-10-03 15:30:38.750304034 +0000 UTC m=+157.715860647" Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.763897 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:38 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:38 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:38 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:38 crc kubenswrapper[5081]: I1003 15:30:38.763984 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:39 crc kubenswrapper[5081]: I1003 15:30:39.703778 5081 generic.go:334] "Generic (PLEG): container finished" podID="6a9fa8c4-0595-48b8-99c8-4ddda023045b" containerID="037165b21a95c5f957ace12c75563dcac490306397ba742d9bfde163194523f3" exitCode=0 Oct 03 15:30:39 crc kubenswrapper[5081]: I1003 15:30:39.703873 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6a9fa8c4-0595-48b8-99c8-4ddda023045b","Type":"ContainerDied","Data":"037165b21a95c5f957ace12c75563dcac490306397ba742d9bfde163194523f3"} Oct 03 15:30:39 crc kubenswrapper[5081]: I1003 15:30:39.765982 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with 
statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:39 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:39 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:39 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:39 crc kubenswrapper[5081]: I1003 15:30:39.766092 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:40 crc kubenswrapper[5081]: I1003 15:30:40.337930 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:40 crc kubenswrapper[5081]: I1003 15:30:40.343604 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-pz8gv" Oct 03 15:30:40 crc kubenswrapper[5081]: I1003 15:30:40.772642 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:40 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:40 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:40 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:40 crc kubenswrapper[5081]: I1003 15:30:40.772707 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.068740 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9ksc9" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.140581 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.302157 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir\") pod \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.302354 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access\") pod \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\" (UID: \"6a9fa8c4-0595-48b8-99c8-4ddda023045b\") " Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.302497 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6a9fa8c4-0595-48b8-99c8-4ddda023045b" (UID: "6a9fa8c4-0595-48b8-99c8-4ddda023045b"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.302809 5081 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.310118 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6a9fa8c4-0595-48b8-99c8-4ddda023045b" (UID: "6a9fa8c4-0595-48b8-99c8-4ddda023045b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.403899 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a9fa8c4-0595-48b8-99c8-4ddda023045b-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.725626 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6a9fa8c4-0595-48b8-99c8-4ddda023045b","Type":"ContainerDied","Data":"77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860"} Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.725690 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77209bf29dae990121528f8ed33a17143d8bbafad24fdf3e5e0164ec8893e860" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.725704 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.764793 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:41 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:41 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:41 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:41 crc kubenswrapper[5081]: I1003 15:30:41.764871 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:42 crc kubenswrapper[5081]: I1003 15:30:42.763538 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:42 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:42 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:42 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:42 crc kubenswrapper[5081]: I1003 15:30:42.764166 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:43 crc kubenswrapper[5081]: I1003 15:30:43.764053 5081 patch_prober.go:28] interesting 
pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:43 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:43 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:43 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:43 crc kubenswrapper[5081]: I1003 15:30:43.764168 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:44 crc kubenswrapper[5081]: I1003 15:30:44.763269 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:44 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:44 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:44 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:44 crc kubenswrapper[5081]: I1003 15:30:44.763795 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.341894 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.341994 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.342115 5081 patch_prober.go:28] interesting pod/downloads-7954f5f757-7v9g5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.342215 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7v9g5" podUID="f1e532dd-8dc1-4a78-8ebf-ad9c3b607a68" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.697327 5081 patch_prober.go:28] interesting pod/console-f9d7485db-hr5zj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.697434 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hr5zj" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" probeResult="failure" output="Get 
\"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.764141 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:45 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:45 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:45 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:45 crc kubenswrapper[5081]: I1003 15:30:45.764263 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.761991 5081 patch_prober.go:28] interesting pod/router-default-5444994796-qksdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 15:30:46 crc kubenswrapper[5081]: [-]has-synced failed: reason withheld Oct 03 15:30:46 crc kubenswrapper[5081]: [+]process-running ok Oct 03 15:30:46 crc kubenswrapper[5081]: healthz check failed Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.762065 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qksdj" podUID="72843d3c-0fcf-4436-9516-164af96ca830" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.792816 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.812356 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/660149c8-a5c7-4581-abae-89611dafa042-metrics-certs\") pod \"network-metrics-daemon-zdszj\" (UID: \"660149c8-a5c7-4581-abae-89611dafa042\") " pod="openshift-multus/network-metrics-daemon-zdszj" Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.947508 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:30:46 crc kubenswrapper[5081]: I1003 15:30:46.947508 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zdszj"
Oct 03 15:30:47 crc kubenswrapper[5081]: I1003 15:30:47.764784 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:47 crc kubenswrapper[5081]: I1003 15:30:47.767069 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qksdj"
Oct 03 15:30:54 crc kubenswrapper[5081]: I1003 15:30:54.562414 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz"
Oct 03 15:30:55 crc kubenswrapper[5081]: I1003 15:30:55.350106 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7v9g5"
Oct 03 15:30:55 crc kubenswrapper[5081]: I1003 15:30:55.692173 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-hr5zj"
Oct 03 15:30:55 crc kubenswrapper[5081]: I1003 15:30:55.696480 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-hr5zj"
Oct 03 15:31:00 crc kubenswrapper[5081]: I1003 15:31:00.648197 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 15:31:00 crc kubenswrapper[5081]: I1003 15:31:00.648289 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 15:31:05 crc kubenswrapper[5081]: I1003 15:31:05.937249 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rwpg9"
Oct 03 15:31:11 crc kubenswrapper[5081]: I1003 15:31:11.262530 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 15:31:15 crc kubenswrapper[5081]: E1003 15:31:15.528910 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage4288162105/2\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 03 15:31:15 crc kubenswrapper[5081]: E1003 15:31:15.529311 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kzps8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-b4b85_openshift-marketplace(6c3b8de3-3277-44c9-b6d8-20f784938901): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage4288162105/2\": happened during read: context canceled" logger="UnhandledError" Oct 03 15:31:15 crc kubenswrapper[5081]: E1003 15:31:15.533098 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage4288162105/2\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-operators-b4b85" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" Oct 03 15:31:22 crc kubenswrapper[5081]: E1003 15:31:22.417323 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-b4b85" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" Oct 03 15:31:27 crc kubenswrapper[5081]: E1003 15:31:27.655677 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 15:31:27 crc kubenswrapper[5081]: E1003 15:31:27.656390 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9nllj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-6pt7z_openshift-marketplace(c5fac9b5-842e-4913-8c2f-6cfb992f3e1d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:27 crc kubenswrapper[5081]: E1003 15:31:27.657640 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-6pt7z" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" Oct 03 15:31:30 crc kubenswrapper[5081]: I1003 15:31:30.647673 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:31:30 crc kubenswrapper[5081]: I1003 15:31:30.648105 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:31:30 crc kubenswrapper[5081]: I1003 15:31:30.648166 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:31:30 crc kubenswrapper[5081]: I1003 15:31:30.648797 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:31:30 crc kubenswrapper[5081]: I1003 15:31:30.648917 5081 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f" gracePeriod=600 Oct 03 15:31:34 crc kubenswrapper[5081]: I1003 15:31:34.081151 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f" exitCode=0 Oct 03 15:31:34 crc kubenswrapper[5081]: I1003 15:31:34.081221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f"} Oct 03 15:31:35 crc kubenswrapper[5081]: E1003 15:31:35.079937 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-6pt7z" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" Oct 03 15:31:35 crc kubenswrapper[5081]: E1003 15:31:35.899747 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 15:31:35 crc kubenswrapper[5081]: E1003 15:31:35.899990 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j8qdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-phpqk_openshift-marketplace(a20eff2d-16a6-4023-ae43-29d16ed9c041): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:35 crc kubenswrapper[5081]: E1003 15:31:35.901262 5081 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-phpqk" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" Oct 03 15:31:36 crc kubenswrapper[5081]: E1003 15:31:36.004500 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 15:31:36 crc kubenswrapper[5081]: E1003 15:31:36.004776 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lswbr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-5htzp_openshift-marketplace(b975a9ae-a790-4a4b-b3da-0e21ab595446): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:36 crc kubenswrapper[5081]: E1003 15:31:36.006018 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-5htzp" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" Oct 03 15:31:37 crc kubenswrapper[5081]: E1003 15:31:37.054551 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 15:31:37 crc kubenswrapper[5081]: E1003 15:31:37.054790 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zczx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9j2m7_openshift-marketplace(03e7d066-14df-4d26-a8aa-468168acc0a3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:37 crc kubenswrapper[5081]: E1003 15:31:37.056019 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9j2m7" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" Oct 03 15:31:38 crc kubenswrapper[5081]: E1003 15:31:38.837199 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-5htzp" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" Oct 03 15:31:38 crc kubenswrapper[5081]: E1003 15:31:38.837289 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-phpqk" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" Oct 03 15:31:38 crc kubenswrapper[5081]: E1003 15:31:38.837575 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9j2m7" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" Oct 03 15:31:38 crc kubenswrapper[5081]: E1003 15:31:38.919165 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 15:31:38 crc kubenswrapper[5081]: 
E1003 15:31:38.919675 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pqc4b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-bcl7h_openshift-marketplace(e534f1b5-437a-4e9a-9f68-247a5b1fad20): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:38 crc kubenswrapper[5081]: E1003 15:31:38.920852 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-bcl7h" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.638772 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-bcl7h" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.729763 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.729992 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w9b5k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-jrrss_openshift-marketplace(e15fe7f8-7909-4553-97d1-17417fc0b111): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.731245 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-jrrss" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.791736 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.792227 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xwklt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-fg69b_openshift-marketplace(43899581-01e6-4eda-87cd-33f5b3d6879b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 15:31:39 crc kubenswrapper[5081]: E1003 15:31:39.795576 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-fg69b" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" Oct 03 15:31:40 crc kubenswrapper[5081]: I1003 15:31:40.073057 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zdszj"] Oct 03 15:31:40 crc kubenswrapper[5081]: I1003 15:31:40.126434 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8"} Oct 03 15:31:40 crc kubenswrapper[5081]: I1003 15:31:40.128936 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerStarted","Data":"d5b15f555ab3d75d3f93e7143dfd24cd43fbc4bd189e64dbadfca5838ccd1c8b"} Oct 03 15:31:40 crc kubenswrapper[5081]: I1003 15:31:40.131845 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zdszj" event={"ID":"660149c8-a5c7-4581-abae-89611dafa042","Type":"ContainerStarted","Data":"264ebc82c53bbe324c051d6e55cb489b867517d8e7d9495b7a6c4ade9e8f249b"} Oct 03 15:31:40 crc kubenswrapper[5081]: E1003 15:31:40.133744 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-jrrss" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" Oct 03 15:31:40 crc 
kubenswrapper[5081]: E1003 15:31:40.134452 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-fg69b" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b"
Oct 03 15:31:41 crc kubenswrapper[5081]: I1003 15:31:41.143009 5081 generic.go:334] "Generic (PLEG): container finished" podID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerID="d5b15f555ab3d75d3f93e7143dfd24cd43fbc4bd189e64dbadfca5838ccd1c8b" exitCode=0
Oct 03 15:31:41 crc kubenswrapper[5081]: I1003 15:31:41.143190 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerDied","Data":"d5b15f555ab3d75d3f93e7143dfd24cd43fbc4bd189e64dbadfca5838ccd1c8b"}
Oct 03 15:31:41 crc kubenswrapper[5081]: I1003 15:31:41.148127 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zdszj" event={"ID":"660149c8-a5c7-4581-abae-89611dafa042","Type":"ContainerStarted","Data":"da592bbe70f9c9f9eff6d09d675062392d80a88cfbcaf4d61b4e8ed7f63d0780"}
Oct 03 15:31:41 crc kubenswrapper[5081]: I1003 15:31:41.148215 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zdszj" event={"ID":"660149c8-a5c7-4581-abae-89611dafa042","Type":"ContainerStarted","Data":"813bf57b5931f6e6c37a56957804c4c943d19830f2486b9c86cc300b81b004ee"}
Oct 03 15:31:42 crc kubenswrapper[5081]: I1003 15:31:42.160704 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerStarted","Data":"34d7df48f18635f90616566908f9e36bbcad5e7fb390b5ebd840d588cf2fe832"}
Oct 03 15:31:42 crc kubenswrapper[5081]: I1003 15:31:42.190762 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zdszj" podStartSLOduration=198.19073388 podStartE2EDuration="3m18.19073388s" podCreationTimestamp="2025-10-03 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:31:41.214941136 +0000 UTC m=+220.180497849" watchObservedRunningTime="2025-10-03 15:31:42.19073388 +0000 UTC m=+221.156290503"
Oct 03 15:31:47 crc kubenswrapper[5081]: I1003 15:31:47.152280 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b4b85"
Oct 03 15:31:47 crc kubenswrapper[5081]: I1003 15:31:47.154890 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b4b85"
Oct 03 15:31:47 crc kubenswrapper[5081]: I1003 15:31:47.852343 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b4b85" podStartSLOduration=8.81929348 podStartE2EDuration="1m11.852320149s" podCreationTimestamp="2025-10-03 15:30:36 +0000 UTC" firstStartedPulling="2025-10-03 15:30:38.675743835 +0000 UTC m=+157.641300448" lastFinishedPulling="2025-10-03 15:31:41.708770504 +0000 UTC m=+220.674327117" observedRunningTime="2025-10-03 15:31:42.189850284 +0000 UTC m=+221.155406897" watchObservedRunningTime="2025-10-03 15:31:47.852320149 +0000 UTC m=+226.817876762"
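The pod_startup_latency_tracker entries above expose how the two logged durations relate: podStartE2EDuration appears to be watchObservedRunningTime minus podCreationTimestamp, while podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling), so slow pulls do not count against the startup SLO. A short Go check against the redhat-operators-b4b85 numbers logged at 15:31:47 (the timestamps are from the log fields; the arithmetic is the assumption being tested):

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	// Matches the timestamp format used in the log fields.
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-10-03 15:30:36 +0000 UTC")
	firstPull := mustParse("2025-10-03 15:30:38.675743835 +0000 UTC")
	lastPull := mustParse("2025-10-03 15:31:41.708770504 +0000 UTC")
	observed := mustParse("2025-10-03 15:31:47.852320149 +0000 UTC")

	e2e := observed.Sub(created)         // full wall-clock startup
	slo := e2e - lastPull.Sub(firstPull) // minus time spent pulling the image
	fmt.Println("podStartE2EDuration:", e2e) // prints 1m11.852320149s
	fmt.Println("podStartSLOduration:", slo) // prints 8.81929348s
}

Both printed values match the logged fields. That is why the SLO figure is only ~8.8s even though the pod took over a minute end to end: most of that minute went to pulling the catalog image, whose earlier attempts failed with the context-canceled and ImagePullBackOff errors above.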
Oct 03 15:31:48 crc kubenswrapper[5081]: I1003 15:31:48.309058 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b4b85" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="registry-server" probeResult="failure" output=<
Oct 03 15:31:48 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s
Oct 03 15:31:48 crc kubenswrapper[5081]: >
Oct 03 15:31:50 crc kubenswrapper[5081]: I1003 15:31:50.214346 5081 generic.go:334] "Generic (PLEG): container finished" podID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerID="1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3" exitCode=0
Oct 03 15:31:50 crc kubenswrapper[5081]: I1003 15:31:50.214463 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerDied","Data":"1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3"}
Oct 03 15:31:51 crc kubenswrapper[5081]: I1003 15:31:51.228211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerStarted","Data":"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c"}
Oct 03 15:31:52 crc kubenswrapper[5081]: I1003 15:31:52.256282 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6pt7z" podStartSLOduration=4.131646467 podStartE2EDuration="1m19.256256024s" podCreationTimestamp="2025-10-03 15:30:33 +0000 UTC" firstStartedPulling="2025-10-03 15:30:35.505372622 +0000 UTC m=+154.470929235" lastFinishedPulling="2025-10-03 15:31:50.629982169 +0000 UTC m=+229.595538792" observedRunningTime="2025-10-03 15:31:52.254199643 +0000 UTC m=+231.219756266" watchObservedRunningTime="2025-10-03 15:31:52.256256024 +0000 UTC m=+231.221812637"
Oct 03 15:31:53 crc kubenswrapper[5081]: I1003 15:31:53.952496 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:31:53 crc kubenswrapper[5081]: I1003 15:31:53.953335 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:31:53 crc kubenswrapper[5081]: E1003 15:31:53.990170 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb975a9ae_a790_4a4b_b3da_0e21ab595446.slice/crio-conmon-a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb975a9ae_a790_4a4b_b3da_0e21ab595446.slice/crio-a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6.scope\": RecentStats: unable to find data in memory cache]"
Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.011419 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6pt7z"
Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.248136 5081 generic.go:334] "Generic (PLEG): container finished" podID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerID="4a4bc66739de75440831ebd6a53422c1af34a0d2a954ca949e02b4aab1880f7e" exitCode=0
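The registry-server startup probe above fails with `timeout: failed to connect service ":50051" within 1s`, the output shape of a grpc_health_probe-style check: the catalog pods serve a gRPC health endpoint on :50051 and the probe must connect (and get a serving response) within a one-second deadline. The Go sketch below reproduces only the connect-deadline part with a plain TCP dial; the real probe also issues the gRPC health-check RPC, which this deliberately omits.

package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

func main() {
	// ":50051" resolves to the local host, matching the probe output above.
	conn, err := net.DialTimeout("tcp", ":50051", time.Second)
	if err != nil {
		// Same shape as the probe failure recorded in the log.
		fmt.Printf("timeout: failed to connect service %q within 1s: %v\n", ":50051", err)
		os.Exit(1)
	}
	conn.Close()
	fmt.Println("service reachable")
}

The probe keeps failing while the freshly started registry-server loads its extracted catalog, which is why the kubelet reports probe="startup" status="unhealthy" and then status="started" a few seconds later.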
event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerDied","Data":"4a4bc66739de75440831ebd6a53422c1af34a0d2a954ca949e02b4aab1880f7e"} Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.251024 5081 generic.go:334] "Generic (PLEG): container finished" podID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerID="887322dadd1b91f6f3ee9e66f37e37912d9c0557d0254f315ebc894a6f37c77c" exitCode=0 Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.252007 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerDied","Data":"887322dadd1b91f6f3ee9e66f37e37912d9c0557d0254f315ebc894a6f37c77c"} Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.255699 5081 generic.go:334] "Generic (PLEG): container finished" podID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerID="a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6" exitCode=0 Oct 03 15:31:54 crc kubenswrapper[5081]: I1003 15:31:54.256152 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerDied","Data":"a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6"} Oct 03 15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.263798 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerStarted","Data":"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47"} Oct 03 15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.267946 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phpqk" event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerStarted","Data":"cd1bc455f34051d89197d11cc86960120897326031562230af13318437f849de"} Oct 03 15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.270111 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerStarted","Data":"c4e3dbd961ae262037229fd0208b39897bc87eb5182fcfd9d4e350b2854c078c"} Oct 03 15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.290433 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5htzp" podStartSLOduration=1.795241109 podStartE2EDuration="1m21.290388081s" podCreationTimestamp="2025-10-03 15:30:34 +0000 UTC" firstStartedPulling="2025-10-03 15:30:35.51710337 +0000 UTC m=+154.482659993" lastFinishedPulling="2025-10-03 15:31:55.012250312 +0000 UTC m=+233.977806965" observedRunningTime="2025-10-03 15:31:55.284739884 +0000 UTC m=+234.250296547" watchObservedRunningTime="2025-10-03 15:31:55.290388081 +0000 UTC m=+234.255944704" Oct 03 15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.314362 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-phpqk" podStartSLOduration=2.7758091030000003 podStartE2EDuration="1m22.314344351s" podCreationTimestamp="2025-10-03 15:30:33 +0000 UTC" firstStartedPulling="2025-10-03 15:30:35.519383017 +0000 UTC m=+154.484939630" lastFinishedPulling="2025-10-03 15:31:55.057918265 +0000 UTC m=+234.023474878" observedRunningTime="2025-10-03 15:31:55.313216128 +0000 UTC m=+234.278772741" watchObservedRunningTime="2025-10-03 15:31:55.314344351 +0000 UTC m=+234.279900974" Oct 03 
15:31:55 crc kubenswrapper[5081]: I1003 15:31:55.341504 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fg69b" podStartSLOduration=3.093512619 podStartE2EDuration="1m20.341476505s" podCreationTimestamp="2025-10-03 15:30:35 +0000 UTC" firstStartedPulling="2025-10-03 15:30:37.686808811 +0000 UTC m=+156.652365414" lastFinishedPulling="2025-10-03 15:31:54.934772677 +0000 UTC m=+233.900329300" observedRunningTime="2025-10-03 15:31:55.33457202 +0000 UTC m=+234.300128643" watchObservedRunningTime="2025-10-03 15:31:55.341476505 +0000 UTC m=+234.307033118" Oct 03 15:31:56 crc kubenswrapper[5081]: I1003 15:31:56.128189 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:31:56 crc kubenswrapper[5081]: I1003 15:31:56.128363 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:31:56 crc kubenswrapper[5081]: I1003 15:31:56.179460 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:31:56 crc kubenswrapper[5081]: I1003 15:31:56.278670 5081 generic.go:334] "Generic (PLEG): container finished" podID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerID="f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e" exitCode=0 Oct 03 15:31:56 crc kubenswrapper[5081]: I1003 15:31:56.278808 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerDied","Data":"f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e"} Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.204738 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.250712 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.294752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerStarted","Data":"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf"} Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.306446 5081 generic.go:334] "Generic (PLEG): container finished" podID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerID="fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6" exitCode=0 Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.306519 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerDied","Data":"fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6"} Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.311094 5081 generic.go:334] "Generic (PLEG): container finished" podID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerID="7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2" exitCode=0 Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.311194 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" 
event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerDied","Data":"7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2"} Oct 03 15:31:57 crc kubenswrapper[5081]: I1003 15:31:57.320795 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9j2m7" podStartSLOduration=3.121540885 podStartE2EDuration="1m24.320774215s" podCreationTimestamp="2025-10-03 15:30:33 +0000 UTC" firstStartedPulling="2025-10-03 15:30:35.505989421 +0000 UTC m=+154.471546034" lastFinishedPulling="2025-10-03 15:31:56.705222761 +0000 UTC m=+235.670779364" observedRunningTime="2025-10-03 15:31:57.318146097 +0000 UTC m=+236.283702710" watchObservedRunningTime="2025-10-03 15:31:57.320774215 +0000 UTC m=+236.286330828" Oct 03 15:31:58 crc kubenswrapper[5081]: I1003 15:31:58.319373 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerStarted","Data":"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694"} Oct 03 15:31:58 crc kubenswrapper[5081]: I1003 15:31:58.325582 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerStarted","Data":"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c"} Oct 03 15:31:58 crc kubenswrapper[5081]: I1003 15:31:58.369201 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bcl7h" podStartSLOduration=2.286278061 podStartE2EDuration="1m21.369165761s" podCreationTimestamp="2025-10-03 15:30:37 +0000 UTC" firstStartedPulling="2025-10-03 15:30:38.68634858 +0000 UTC m=+157.651905193" lastFinishedPulling="2025-10-03 15:31:57.76923629 +0000 UTC m=+236.734792893" observedRunningTime="2025-10-03 15:31:58.364765381 +0000 UTC m=+237.330321994" watchObservedRunningTime="2025-10-03 15:31:58.369165761 +0000 UTC m=+237.334722374" Oct 03 15:31:58 crc kubenswrapper[5081]: I1003 15:31:58.370917 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jrrss" podStartSLOduration=2.351550633 podStartE2EDuration="1m22.370909333s" podCreationTimestamp="2025-10-03 15:30:36 +0000 UTC" firstStartedPulling="2025-10-03 15:30:37.689096509 +0000 UTC m=+156.654653122" lastFinishedPulling="2025-10-03 15:31:57.708455209 +0000 UTC m=+236.674011822" observedRunningTime="2025-10-03 15:31:58.344346056 +0000 UTC m=+237.309902689" watchObservedRunningTime="2025-10-03 15:31:58.370909333 +0000 UTC m=+237.336465956" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.024748 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6pt7z" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.121663 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.122068 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.161243 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.395668 5081 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.395718 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.407759 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.454345 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.549164 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.549229 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:04 crc kubenswrapper[5081]: I1003 15:32:04.596508 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:05 crc kubenswrapper[5081]: I1003 15:32:05.412809 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:05 crc kubenswrapper[5081]: I1003 15:32:05.413369 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.175317 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.267413 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5htzp"] Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.564163 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.564242 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.602216 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:06 crc kubenswrapper[5081]: I1003 15:32:06.863822 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"] Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.389705 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9j2m7" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="registry-server" containerID="cri-o://135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf" gracePeriod=2 Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.389552 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5htzp" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="registry-server" containerID="cri-o://74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47" gracePeriod=2 Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.430213 5081 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.539192 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.539754 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.594267 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.877658 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.881060 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.921511 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zczx5\" (UniqueName: \"kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5\") pod \"03e7d066-14df-4d26-a8aa-468168acc0a3\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.921647 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lswbr\" (UniqueName: \"kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr\") pod \"b975a9ae-a790-4a4b-b3da-0e21ab595446\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.921724 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities\") pod \"03e7d066-14df-4d26-a8aa-468168acc0a3\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.921795 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities\") pod \"b975a9ae-a790-4a4b-b3da-0e21ab595446\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.922603 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content\") pod \"03e7d066-14df-4d26-a8aa-468168acc0a3\" (UID: \"03e7d066-14df-4d26-a8aa-468168acc0a3\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.922642 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content\") pod \"b975a9ae-a790-4a4b-b3da-0e21ab595446\" (UID: \"b975a9ae-a790-4a4b-b3da-0e21ab595446\") " Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.923388 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities" (OuterVolumeSpecName: "utilities") pod "03e7d066-14df-4d26-a8aa-468168acc0a3" (UID: "03e7d066-14df-4d26-a8aa-468168acc0a3"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.923662 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities" (OuterVolumeSpecName: "utilities") pod "b975a9ae-a790-4a4b-b3da-0e21ab595446" (UID: "b975a9ae-a790-4a4b-b3da-0e21ab595446"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.931258 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr" (OuterVolumeSpecName: "kube-api-access-lswbr") pod "b975a9ae-a790-4a4b-b3da-0e21ab595446" (UID: "b975a9ae-a790-4a4b-b3da-0e21ab595446"). InnerVolumeSpecName "kube-api-access-lswbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.931795 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5" (OuterVolumeSpecName: "kube-api-access-zczx5") pod "03e7d066-14df-4d26-a8aa-468168acc0a3" (UID: "03e7d066-14df-4d26-a8aa-468168acc0a3"). InnerVolumeSpecName "kube-api-access-zczx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.973350 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03e7d066-14df-4d26-a8aa-468168acc0a3" (UID: "03e7d066-14df-4d26-a8aa-468168acc0a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:07 crc kubenswrapper[5081]: I1003 15:32:07.987249 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b975a9ae-a790-4a4b-b3da-0e21ab595446" (UID: "b975a9ae-a790-4a4b-b3da-0e21ab595446"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024432 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024489 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024513 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b975a9ae-a790-4a4b-b3da-0e21ab595446-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024531 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zczx5\" (UniqueName: \"kubernetes.io/projected/03e7d066-14df-4d26-a8aa-468168acc0a3-kube-api-access-zczx5\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024545 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lswbr\" (UniqueName: \"kubernetes.io/projected/b975a9ae-a790-4a4b-b3da-0e21ab595446-kube-api-access-lswbr\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.024576 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03e7d066-14df-4d26-a8aa-468168acc0a3-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.400841 5081 generic.go:334] "Generic (PLEG): container finished" podID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerID="74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47" exitCode=0 Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.400944 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerDied","Data":"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47"} Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.400989 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5htzp" event={"ID":"b975a9ae-a790-4a4b-b3da-0e21ab595446","Type":"ContainerDied","Data":"2947cd1a707fc5a466d622da535b5914bed266e6b6b51950624668bf2e660c26"} Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.401024 5081 scope.go:117] "RemoveContainer" containerID="74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.401197 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5htzp" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.417701 5081 generic.go:334] "Generic (PLEG): container finished" podID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerID="135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf" exitCode=0 Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.417783 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9j2m7" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.417779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerDied","Data":"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf"} Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.417843 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9j2m7" event={"ID":"03e7d066-14df-4d26-a8aa-468168acc0a3","Type":"ContainerDied","Data":"eeea7672ce92136aa860e367d74548ea9223ecb2247763d647d35ee6879d8418"} Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.443305 5081 scope.go:117] "RemoveContainer" containerID="a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.453790 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5htzp"] Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.483040 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5htzp"] Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.508314 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"] Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.512806 5081 scope.go:117] "RemoveContainer" containerID="b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.516271 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9j2m7"] Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.546326 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.566841 5081 scope.go:117] "RemoveContainer" containerID="74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.567404 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47\": container with ID starting with 74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47 not found: ID does not exist" containerID="74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.567438 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47"} err="failed to get container status \"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47\": rpc error: code = NotFound desc = could not find container \"74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47\": container with ID starting with 74bb11a6b170a9c7e6b4541a248620f74473f1d28d151ec156356e85196eda47 not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.567460 5081 scope.go:117] "RemoveContainer" containerID="a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.570592 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6\": container with ID starting with a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6 not found: ID does not exist" containerID="a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.570614 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6"} err="failed to get container status \"a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6\": rpc error: code = NotFound desc = could not find container \"a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6\": container with ID starting with a41c4504b0e4fe49514cae0ee117a49cdb61fd1d516cc51f74bb52354d87dbf6 not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.570628 5081 scope.go:117] "RemoveContainer" containerID="b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.570947 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0\": container with ID starting with b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0 not found: ID does not exist" containerID="b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.571005 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0"} err="failed to get container status \"b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0\": rpc error: code = NotFound desc = could not find container \"b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0\": container with ID starting with b576e70006a749cb2a8a0c04ccb4ea645098675ecba1f2c785630ec83122eca0 not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.571037 5081 scope.go:117] "RemoveContainer" containerID="135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.595033 5081 scope.go:117] "RemoveContainer" containerID="f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.610605 5081 scope.go:117] "RemoveContainer" containerID="5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.625744 5081 scope.go:117] "RemoveContainer" containerID="135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.626323 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf\": container with ID starting with 135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf not found: ID does not exist" containerID="135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.626368 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf"} err="failed to get 
container status \"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf\": rpc error: code = NotFound desc = could not find container \"135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf\": container with ID starting with 135f6fb867b619c4f2ccd95eab12942c2246b1290e1311906686e6da642889bf not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.626400 5081 scope.go:117] "RemoveContainer" containerID="f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.626993 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e\": container with ID starting with f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e not found: ID does not exist" containerID="f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.627018 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e"} err="failed to get container status \"f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e\": rpc error: code = NotFound desc = could not find container \"f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e\": container with ID starting with f73bec536a32d637845ce959f34475928acf5a36e5ba5ce8727f261b868fff1e not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.627031 5081 scope.go:117] "RemoveContainer" containerID="5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140" Oct 03 15:32:08 crc kubenswrapper[5081]: E1003 15:32:08.627488 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140\": container with ID starting with 5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140 not found: ID does not exist" containerID="5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.627574 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140"} err="failed to get container status \"5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140\": rpc error: code = NotFound desc = could not find container \"5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140\": container with ID starting with 5f64b6025f63476e3cf7a1d0988aaaeb848c56b1a201b22e44fd89127effb140 not found: ID does not exist" Oct 03 15:32:08 crc kubenswrapper[5081]: I1003 15:32:08.664522 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:32:09 crc kubenswrapper[5081]: I1003 15:32:09.427700 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jrrss" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="registry-server" containerID="cri-o://a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694" gracePeriod=2 Oct 03 15:32:09 crc kubenswrapper[5081]: I1003 15:32:09.835293 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" 
path="/var/lib/kubelet/pods/03e7d066-14df-4d26-a8aa-468168acc0a3/volumes" Oct 03 15:32:09 crc kubenswrapper[5081]: I1003 15:32:09.836424 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" path="/var/lib/kubelet/pods/b975a9ae-a790-4a4b-b3da-0e21ab595446/volumes" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.339838 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.436221 5081 generic.go:334] "Generic (PLEG): container finished" podID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerID="a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694" exitCode=0 Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.436330 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jrrss" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.436376 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerDied","Data":"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694"} Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.436437 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jrrss" event={"ID":"e15fe7f8-7909-4553-97d1-17417fc0b111","Type":"ContainerDied","Data":"bbf677e278232ffd6130831b6888dfcc5d0d9d659026ee55198ffdde3524d0a8"} Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.436458 5081 scope.go:117] "RemoveContainer" containerID="a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.457511 5081 scope.go:117] "RemoveContainer" containerID="fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.461647 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9b5k\" (UniqueName: \"kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k\") pod \"e15fe7f8-7909-4553-97d1-17417fc0b111\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.461710 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities\") pod \"e15fe7f8-7909-4553-97d1-17417fc0b111\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.461791 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content\") pod \"e15fe7f8-7909-4553-97d1-17417fc0b111\" (UID: \"e15fe7f8-7909-4553-97d1-17417fc0b111\") " Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.463346 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities" (OuterVolumeSpecName: "utilities") pod "e15fe7f8-7909-4553-97d1-17417fc0b111" (UID: "e15fe7f8-7909-4553-97d1-17417fc0b111"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.476467 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k" (OuterVolumeSpecName: "kube-api-access-w9b5k") pod "e15fe7f8-7909-4553-97d1-17417fc0b111" (UID: "e15fe7f8-7909-4553-97d1-17417fc0b111"). InnerVolumeSpecName "kube-api-access-w9b5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.478893 5081 scope.go:117] "RemoveContainer" containerID="0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.481760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e15fe7f8-7909-4553-97d1-17417fc0b111" (UID: "e15fe7f8-7909-4553-97d1-17417fc0b111"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.506404 5081 scope.go:117] "RemoveContainer" containerID="a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694" Oct 03 15:32:10 crc kubenswrapper[5081]: E1003 15:32:10.507111 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694\": container with ID starting with a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694 not found: ID does not exist" containerID="a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.507195 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694"} err="failed to get container status \"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694\": rpc error: code = NotFound desc = could not find container \"a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694\": container with ID starting with a2f5e22034ae38238b370b68126e1195e69a432d45f89b8e9734da8f31429694 not found: ID does not exist" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.507240 5081 scope.go:117] "RemoveContainer" containerID="fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6" Oct 03 15:32:10 crc kubenswrapper[5081]: E1003 15:32:10.507821 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6\": container with ID starting with fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6 not found: ID does not exist" containerID="fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.507877 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6"} err="failed to get container status \"fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6\": rpc error: code = NotFound desc = could not find container \"fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6\": container with ID starting with 
fd1bfd7b8e05455a1d39c9acbc4475da37ec650b17630d4bb315d46fcd7de2d6 not found: ID does not exist" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.507922 5081 scope.go:117] "RemoveContainer" containerID="0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d" Oct 03 15:32:10 crc kubenswrapper[5081]: E1003 15:32:10.508244 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d\": container with ID starting with 0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d not found: ID does not exist" containerID="0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.508284 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d"} err="failed to get container status \"0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d\": rpc error: code = NotFound desc = could not find container \"0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d\": container with ID starting with 0be5c5df8f63e4389de95675969dbe5ce375cf52b37233f5e17805a67f1d144d not found: ID does not exist" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.563869 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9b5k\" (UniqueName: \"kubernetes.io/projected/e15fe7f8-7909-4553-97d1-17417fc0b111-kube-api-access-w9b5k\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.563920 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.563932 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e15fe7f8-7909-4553-97d1-17417fc0b111-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.794439 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:32:10 crc kubenswrapper[5081]: I1003 15:32:10.797959 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jrrss"] Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.265123 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.443230 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bcl7h" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="registry-server" containerID="cri-o://c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c" gracePeriod=2 Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.845384 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" path="/var/lib/kubelet/pods/e15fe7f8-7909-4553-97d1-17417fc0b111/volumes" Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.890405 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.986888 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content\") pod \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.987032 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities\") pod \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.987083 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqc4b\" (UniqueName: \"kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b\") pod \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\" (UID: \"e534f1b5-437a-4e9a-9f68-247a5b1fad20\") " Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.989091 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities" (OuterVolumeSpecName: "utilities") pod "e534f1b5-437a-4e9a-9f68-247a5b1fad20" (UID: "e534f1b5-437a-4e9a-9f68-247a5b1fad20"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:11 crc kubenswrapper[5081]: I1003 15:32:11.996921 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b" (OuterVolumeSpecName: "kube-api-access-pqc4b") pod "e534f1b5-437a-4e9a-9f68-247a5b1fad20" (UID: "e534f1b5-437a-4e9a-9f68-247a5b1fad20"). InnerVolumeSpecName "kube-api-access-pqc4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.061474 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e534f1b5-437a-4e9a-9f68-247a5b1fad20" (UID: "e534f1b5-437a-4e9a-9f68-247a5b1fad20"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.088200 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.088245 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqc4b\" (UniqueName: \"kubernetes.io/projected/e534f1b5-437a-4e9a-9f68-247a5b1fad20-kube-api-access-pqc4b\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.088256 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e534f1b5-437a-4e9a-9f68-247a5b1fad20-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.453032 5081 generic.go:334] "Generic (PLEG): container finished" podID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerID="c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c" exitCode=0 Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.453095 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerDied","Data":"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c"} Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.453171 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcl7h" event={"ID":"e534f1b5-437a-4e9a-9f68-247a5b1fad20","Type":"ContainerDied","Data":"13ff296b9f4c99a173a16e18616292d0d457a8d86cd94cbc07e7b902f878dd10"} Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.453194 5081 scope.go:117] "RemoveContainer" containerID="c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.453120 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bcl7h" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.483682 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.486513 5081 scope.go:117] "RemoveContainer" containerID="7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.488311 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bcl7h"] Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.507032 5081 scope.go:117] "RemoveContainer" containerID="26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.520357 5081 scope.go:117] "RemoveContainer" containerID="c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c" Oct 03 15:32:12 crc kubenswrapper[5081]: E1003 15:32:12.520798 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c\": container with ID starting with c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c not found: ID does not exist" containerID="c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.520848 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c"} err="failed to get container status \"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c\": rpc error: code = NotFound desc = could not find container \"c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c\": container with ID starting with c82e5ab424dfab73c7e8cf6cfadd014db62a25426a20aa46929c23cffc82d30c not found: ID does not exist" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.520881 5081 scope.go:117] "RemoveContainer" containerID="7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2" Oct 03 15:32:12 crc kubenswrapper[5081]: E1003 15:32:12.521384 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2\": container with ID starting with 7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2 not found: ID does not exist" containerID="7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.521444 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2"} err="failed to get container status \"7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2\": rpc error: code = NotFound desc = could not find container \"7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2\": container with ID starting with 7bd4db016061dd1da0e2f3c1f570ec12bf493b85e49287f5179fd79080a3c6c2 not found: ID does not exist" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.521518 5081 scope.go:117] "RemoveContainer" containerID="26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e" Oct 03 15:32:12 crc kubenswrapper[5081]: E1003 15:32:12.522058 5081 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e\": container with ID starting with 26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e not found: ID does not exist" containerID="26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e" Oct 03 15:32:12 crc kubenswrapper[5081]: I1003 15:32:12.522091 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e"} err="failed to get container status \"26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e\": rpc error: code = NotFound desc = could not find container \"26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e\": container with ID starting with 26a30a215a2a4e193b0521d83cb6cfee069f77c2caa0e8213032408fce876d8e not found: ID does not exist" Oct 03 15:32:13 crc kubenswrapper[5081]: I1003 15:32:13.836346 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" path="/var/lib/kubelet/pods/e534f1b5-437a-4e9a-9f68-247a5b1fad20/volumes" Oct 03 15:32:14 crc kubenswrapper[5081]: I1003 15:32:14.561988 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:32:39 crc kubenswrapper[5081]: I1003 15:32:39.590395 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerName="oauth-openshift" containerID="cri-o://83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c" gracePeriod=15 Oct 03 15:32:39 crc kubenswrapper[5081]: I1003 15:32:39.968911 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.005720 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-85d9bf6778-n5jj6"] Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.005962 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc78925-9fb6-4e69-a028-9f876ac04fbb" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.005974 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc78925-9fb6-4e69-a028-9f876ac04fbb" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.005987 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.005993 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006000 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006007 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006016 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerName="oauth-openshift" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006022 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerName="oauth-openshift" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006033 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006039 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006049 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9fa8c4-0595-48b8-99c8-4ddda023045b" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006056 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9fa8c4-0595-48b8-99c8-4ddda023045b" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006063 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006069 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006081 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006087 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006096 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006102 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006111 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006118 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006126 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006132 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006141 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006146 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="extract-content" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006155 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006161 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006168 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006173 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.006179 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006184 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="extract-utilities" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006316 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e534f1b5-437a-4e9a-9f68-247a5b1fad20" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006327 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc78925-9fb6-4e69-a028-9f876ac04fbb" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006335 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e15fe7f8-7909-4553-97d1-17417fc0b111" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006344 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9fa8c4-0595-48b8-99c8-4ddda023045b" containerName="pruner" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006352 5081 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerName="oauth-openshift" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006360 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="03e7d066-14df-4d26-a8aa-468168acc0a3" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006367 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b975a9ae-a790-4a4b-b3da-0e21ab595446" containerName="registry-server" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.006821 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.022916 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-85d9bf6778-n5jj6"] Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132632 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlds9\" (UniqueName: \"kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132753 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132775 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132822 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132847 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132890 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.132915 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 
crc kubenswrapper[5081]: I1003 15:32:40.132946 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133004 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133041 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133072 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133102 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133157 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133187 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca\") pod \"86d5b7db-0c22-4446-9bff-2ff5493f9288\" (UID: \"86d5b7db-0c22-4446-9bff-2ff5493f9288\") " Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133371 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-session\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133416 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " 
pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133453 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-serving-cert\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133476 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-login\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133502 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133529 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-cliconfig\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133641 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-error\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133665 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-router-certs\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133686 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-dir\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133712 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-service-ca\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-policies\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133765 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133789 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4kl\" (UniqueName: \"kubernetes.io/projected/3b03bd08-f9d0-4054-a9b2-6b3678a84723-kube-api-access-qd4kl\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133811 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133852 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.133885 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.134274 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.134912 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.136123 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.146702 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.146872 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9" (OuterVolumeSpecName: "kube-api-access-jlds9") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "kube-api-access-jlds9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.147194 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.147536 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.148532 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.149297 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.150283 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.150649 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.150883 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "86d5b7db-0c22-4446-9bff-2ff5493f9288" (UID: "86d5b7db-0c22-4446-9bff-2ff5493f9288"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.234872 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-policies\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.234958 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.234993 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4kl\" (UniqueName: \"kubernetes.io/projected/3b03bd08-f9d0-4054-a9b2-6b3678a84723-kube-api-access-qd4kl\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235022 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235056 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-session\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235111 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-serving-cert\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235131 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-login\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " 
pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235152 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235178 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-cliconfig\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235207 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-error\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235251 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-router-certs\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-dir\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235300 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-service-ca\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235361 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235376 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235390 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlds9\" (UniqueName: \"kubernetes.io/projected/86d5b7db-0c22-4446-9bff-2ff5493f9288-kube-api-access-jlds9\") on node 
\"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235407 5081 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235422 5081 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/86d5b7db-0c22-4446-9bff-2ff5493f9288-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235436 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235452 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235464 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235476 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235489 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235506 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235520 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235532 5081 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/86d5b7db-0c22-4446-9bff-2ff5493f9288-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.235936 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-policies\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.236190 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-service-ca\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.236766 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3b03bd08-f9d0-4054-a9b2-6b3678a84723-audit-dir\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.237967 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-cliconfig\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.238222 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.240652 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.241065 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-error\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.241157 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.241224 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-session\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.241414 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-user-template-login\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.241665 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-router-certs\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.242088 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.248602 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b03bd08-f9d0-4054-a9b2-6b3678a84723-v4-0-config-system-serving-cert\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.278666 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4kl\" (UniqueName: \"kubernetes.io/projected/3b03bd08-f9d0-4054-a9b2-6b3678a84723-kube-api-access-qd4kl\") pod \"oauth-openshift-85d9bf6778-n5jj6\" (UID: \"3b03bd08-f9d0-4054-a9b2-6b3678a84723\") " pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.332319 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.586099 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-85d9bf6778-n5jj6"] Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.631797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" event={"ID":"3b03bd08-f9d0-4054-a9b2-6b3678a84723","Type":"ContainerStarted","Data":"dadd81860b14a29b113d21e78f5a76a681e16afaadcdfc0873ab100603fa9202"} Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.634937 5081 generic.go:334] "Generic (PLEG): container finished" podID="86d5b7db-0c22-4446-9bff-2ff5493f9288" containerID="83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c" exitCode=0 Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.634990 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" event={"ID":"86d5b7db-0c22-4446-9bff-2ff5493f9288","Type":"ContainerDied","Data":"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c"} Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.635023 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.635075 5081 scope.go:117] "RemoveContainer" containerID="83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.635058 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ntvnz" event={"ID":"86d5b7db-0c22-4446-9bff-2ff5493f9288","Type":"ContainerDied","Data":"20a4ab50cc4c73133827053d5f8edc116b7d8a5f20cf1f4092e7cae5028276cb"} Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.669978 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.673369 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ntvnz"] Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.673937 5081 scope.go:117] "RemoveContainer" containerID="83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c" Oct 03 15:32:40 crc kubenswrapper[5081]: E1003 15:32:40.674446 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c\": container with ID starting with 83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c not found: ID does not exist" containerID="83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c" Oct 03 15:32:40 crc kubenswrapper[5081]: I1003 15:32:40.674478 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c"} err="failed to get container status \"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c\": rpc error: code = NotFound desc = could not find container \"83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c\": container with ID starting with 83fac704e1f26870196ee72a797342581e3a613875065b086ce75e268d982c1c not found: ID does not exist" Oct 03 15:32:41 crc kubenswrapper[5081]: I1003 15:32:41.644735 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" event={"ID":"3b03bd08-f9d0-4054-a9b2-6b3678a84723","Type":"ContainerStarted","Data":"987fb951448c8748b5042bfa16ed0765ec4f06d1b26a3729c81374cf31579632"} Oct 03 15:32:41 crc kubenswrapper[5081]: I1003 15:32:41.645355 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:41 crc kubenswrapper[5081]: I1003 15:32:41.651922 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" Oct 03 15:32:41 crc kubenswrapper[5081]: I1003 15:32:41.680327 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-85d9bf6778-n5jj6" podStartSLOduration=27.680302067 podStartE2EDuration="27.680302067s" podCreationTimestamp="2025-10-03 15:32:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:32:41.678145375 +0000 UTC m=+280.643702008" watchObservedRunningTime="2025-10-03 15:32:41.680302067 +0000 UTC m=+280.645858690" Oct 03 15:32:41 crc 
kubenswrapper[5081]: I1003 15:32:41.834317 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86d5b7db-0c22-4446-9bff-2ff5493f9288" path="/var/lib/kubelet/pods/86d5b7db-0c22-4446-9bff-2ff5493f9288/volumes" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.600291 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.601677 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6pt7z" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="registry-server" containerID="cri-o://b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c" gracePeriod=30 Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.614029 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-phpqk"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.614545 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-phpqk" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="registry-server" containerID="cri-o://cd1bc455f34051d89197d11cc86960120897326031562230af13318437f849de" gracePeriod=30 Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.626052 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.626586 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" containerID="cri-o://7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508" gracePeriod=30 Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.639950 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.640436 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fg69b" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="registry-server" containerID="cri-o://c4e3dbd961ae262037229fd0208b39897bc87eb5182fcfd9d4e350b2854c078c" gracePeriod=30 Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.649148 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.649543 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b4b85" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="registry-server" containerID="cri-o://34d7df48f18635f90616566908f9e36bbcad5e7fb390b5ebd840d588cf2fe832" gracePeriod=30 Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.662368 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-chvtb"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.663158 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.670383 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-chvtb"] Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.804394 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.804481 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw5ml\" (UniqueName: \"kubernetes.io/projected/79683909-dba0-4c91-9a78-09bdbc9da494-kube-api-access-xw5ml\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.804548 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.906441 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.906546 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.906602 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw5ml\" (UniqueName: \"kubernetes.io/projected/79683909-dba0-4c91-9a78-09bdbc9da494-kube-api-access-xw5ml\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.908498 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.922627 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/79683909-dba0-4c91-9a78-09bdbc9da494-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.926613 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw5ml\" (UniqueName: \"kubernetes.io/projected/79683909-dba0-4c91-9a78-09bdbc9da494-kube-api-access-xw5ml\") pod \"marketplace-operator-79b997595-chvtb\" (UID: \"79683909-dba0-4c91-9a78-09bdbc9da494\") " pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:57 crc kubenswrapper[5081]: I1003 15:32:57.988422 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.402290 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-chvtb"] Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.545956 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6pt7z" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.639649 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.720822 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content\") pod \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.721468 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities\") pod \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.721528 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nllj\" (UniqueName: \"kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj\") pod \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\" (UID: \"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.722529 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities" (OuterVolumeSpecName: "utilities") pod "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" (UID: "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.734004 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj" (OuterVolumeSpecName: "kube-api-access-9nllj") pod "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" (UID: "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d"). InnerVolumeSpecName "kube-api-access-9nllj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.772649 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" event={"ID":"79683909-dba0-4c91-9a78-09bdbc9da494","Type":"ContainerStarted","Data":"e787ee9f1bd08346cca045d46d39587750f9434ccdd3aef982ad77bda435df49"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.772714 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" event={"ID":"79683909-dba0-4c91-9a78-09bdbc9da494","Type":"ContainerStarted","Data":"f7d1143b4178f7607cce39d89c9c34d24083f196bdeeb9613daff3a9eb2de9f2"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.773980 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.779029 5081 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-chvtb container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.779220 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" podUID="79683909-dba0-4c91-9a78-09bdbc9da494" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.785990 5081 generic.go:334] "Generic (PLEG): container finished" podID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerID="34d7df48f18635f90616566908f9e36bbcad5e7fb390b5ebd840d588cf2fe832" exitCode=0 Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.786385 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerDied","Data":"34d7df48f18635f90616566908f9e36bbcad5e7fb390b5ebd840d588cf2fe832"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.786515 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b4b85" event={"ID":"6c3b8de3-3277-44c9-b6d8-20f784938901","Type":"ContainerDied","Data":"61b1fb73fd73cbd99ee880f8152de16008334afdb54803b55390a0fdd55c9911"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.786530 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61b1fb73fd73cbd99ee880f8152de16008334afdb54803b55390a0fdd55c9911" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.810246 5081 generic.go:334] "Generic (PLEG): container finished" podID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerID="b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c" exitCode=0 Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.810334 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerDied","Data":"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.810374 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6pt7z" 
event={"ID":"c5fac9b5-842e-4913-8c2f-6cfb992f3e1d","Type":"ContainerDied","Data":"0453823266cdf3b5ca947a6fc24ef842e3817bdceb1a475c58962427100bec93"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.810398 5081 scope.go:117] "RemoveContainer" containerID="b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.810575 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6pt7z" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.820641 5081 generic.go:334] "Generic (PLEG): container finished" podID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerID="c4e3dbd961ae262037229fd0208b39897bc87eb5182fcfd9d4e350b2854c078c" exitCode=0 Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.820754 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerDied","Data":"c4e3dbd961ae262037229fd0208b39897bc87eb5182fcfd9d4e350b2854c078c"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.823345 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics\") pod \"31ab5383-1898-4964-8e8b-406b81b83fab\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.823422 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdwht\" (UniqueName: \"kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht\") pod \"31ab5383-1898-4964-8e8b-406b81b83fab\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.823502 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca\") pod \"31ab5383-1898-4964-8e8b-406b81b83fab\" (UID: \"31ab5383-1898-4964-8e8b-406b81b83fab\") " Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.823820 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.823847 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nllj\" (UniqueName: \"kubernetes.io/projected/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-kube-api-access-9nllj\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.824821 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "31ab5383-1898-4964-8e8b-406b81b83fab" (UID: "31ab5383-1898-4964-8e8b-406b81b83fab"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.828361 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "31ab5383-1898-4964-8e8b-406b81b83fab" (UID: "31ab5383-1898-4964-8e8b-406b81b83fab"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.828745 5081 generic.go:334] "Generic (PLEG): container finished" podID="31ab5383-1898-4964-8e8b-406b81b83fab" containerID="7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508" exitCode=0 Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.828841 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" event={"ID":"31ab5383-1898-4964-8e8b-406b81b83fab","Type":"ContainerDied","Data":"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.828878 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.828903 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8p57q" event={"ID":"31ab5383-1898-4964-8e8b-406b81b83fab","Type":"ContainerDied","Data":"7df8465742cdf8b03806db6f7730fa0b04919afc47d2c596823d8e57a3ea26c1"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.829004 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht" (OuterVolumeSpecName: "kube-api-access-vdwht") pod "31ab5383-1898-4964-8e8b-406b81b83fab" (UID: "31ab5383-1898-4964-8e8b-406b81b83fab"). InnerVolumeSpecName "kube-api-access-vdwht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.831225 5081 generic.go:334] "Generic (PLEG): container finished" podID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerID="cd1bc455f34051d89197d11cc86960120897326031562230af13318437f849de" exitCode=0 Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.831285 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phpqk" event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerDied","Data":"cd1bc455f34051d89197d11cc86960120897326031562230af13318437f849de"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.831306 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phpqk" event={"ID":"a20eff2d-16a6-4023-ae43-29d16ed9c041","Type":"ContainerDied","Data":"40522bce61e7fc6947a29641c1eaf38b60f4ed7feda9994a2e550eea24c3c538"} Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.831318 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40522bce61e7fc6947a29641c1eaf38b60f4ed7feda9994a2e550eea24c3c538" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.843300 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" (UID: "c5fac9b5-842e-4913-8c2f-6cfb992f3e1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.860290 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.864904 5081 scope.go:117] "RemoveContainer" containerID="1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.872348 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" podStartSLOduration=1.8723204390000001 podStartE2EDuration="1.872320439s" podCreationTimestamp="2025-10-03 15:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:32:58.811215197 +0000 UTC m=+297.776771840" watchObservedRunningTime="2025-10-03 15:32:58.872320439 +0000 UTC m=+297.837877092" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.876174 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.879229 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8p57q"] Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.883586 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.894030 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.917492 5081 scope.go:117] "RemoveContainer" containerID="5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.925503 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.925643 5081 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.925665 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdwht\" (UniqueName: \"kubernetes.io/projected/31ab5383-1898-4964-8e8b-406b81b83fab-kube-api-access-vdwht\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.925679 5081 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31ab5383-1898-4964-8e8b-406b81b83fab-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.946862 5081 scope.go:117] "RemoveContainer" containerID="b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c" Oct 03 15:32:58 crc kubenswrapper[5081]: E1003 15:32:58.947392 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c\": container with ID starting with b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c not found: ID does not exist" containerID="b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.947428 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c"} err="failed to get container status \"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c\": rpc error: code = NotFound desc = could not find container \"b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c\": container with ID starting with b527f756d2368546620fed14312f9fca54793e00d6c1e34cc77afef86226644c not found: ID does not exist" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.947454 5081 scope.go:117] "RemoveContainer" containerID="1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3" Oct 03 15:32:58 crc kubenswrapper[5081]: E1003 15:32:58.949431 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3\": container with ID starting with 1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3 not found: ID does not exist" containerID="1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.949507 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3"} err="failed to get container status 
\"1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3\": rpc error: code = NotFound desc = could not find container \"1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3\": container with ID starting with 1c01fdbb623b93f95bf0c3fa2e080b83f117fbc096ea8760e0bca11e120f09a3 not found: ID does not exist" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.949548 5081 scope.go:117] "RemoveContainer" containerID="5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb" Oct 03 15:32:58 crc kubenswrapper[5081]: E1003 15:32:58.954608 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb\": container with ID starting with 5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb not found: ID does not exist" containerID="5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.954689 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb"} err="failed to get container status \"5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb\": rpc error: code = NotFound desc = could not find container \"5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb\": container with ID starting with 5c247add88a3fbd9c8a16e12a0178de64db0229cf15706a5df796dc67cdecdeb not found: ID does not exist" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.954733 5081 scope.go:117] "RemoveContainer" containerID="7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.986847 5081 scope.go:117] "RemoveContainer" containerID="7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508" Oct 03 15:32:58 crc kubenswrapper[5081]: E1003 15:32:58.987944 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508\": container with ID starting with 7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508 not found: ID does not exist" containerID="7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508" Oct 03 15:32:58 crc kubenswrapper[5081]: I1003 15:32:58.988031 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508"} err="failed to get container status \"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508\": rpc error: code = NotFound desc = could not find container \"7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508\": container with ID starting with 7526b745c13deef17d2cd5cac719e8538410cd33d79cf4c15611f380e938a508 not found: ID does not exist" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027121 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzps8\" (UniqueName: \"kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8\") pod \"6c3b8de3-3277-44c9-b6d8-20f784938901\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities\") pod \"6c3b8de3-3277-44c9-b6d8-20f784938901\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027301 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities\") pod \"a20eff2d-16a6-4023-ae43-29d16ed9c041\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027337 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content\") pod \"43899581-01e6-4eda-87cd-33f5b3d6879b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027398 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities\") pod \"43899581-01e6-4eda-87cd-33f5b3d6879b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027440 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content\") pod \"a20eff2d-16a6-4023-ae43-29d16ed9c041\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027464 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content\") pod \"6c3b8de3-3277-44c9-b6d8-20f784938901\" (UID: \"6c3b8de3-3277-44c9-b6d8-20f784938901\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027519 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8qdd\" (UniqueName: \"kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd\") pod \"a20eff2d-16a6-4023-ae43-29d16ed9c041\" (UID: \"a20eff2d-16a6-4023-ae43-29d16ed9c041\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.027547 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwklt\" (UniqueName: \"kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt\") pod \"43899581-01e6-4eda-87cd-33f5b3d6879b\" (UID: \"43899581-01e6-4eda-87cd-33f5b3d6879b\") " Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.029731 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities" (OuterVolumeSpecName: "utilities") pod "6c3b8de3-3277-44c9-b6d8-20f784938901" (UID: "6c3b8de3-3277-44c9-b6d8-20f784938901"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.029958 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities" (OuterVolumeSpecName: "utilities") pod "a20eff2d-16a6-4023-ae43-29d16ed9c041" (UID: "a20eff2d-16a6-4023-ae43-29d16ed9c041"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.031871 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd" (OuterVolumeSpecName: "kube-api-access-j8qdd") pod "a20eff2d-16a6-4023-ae43-29d16ed9c041" (UID: "a20eff2d-16a6-4023-ae43-29d16ed9c041"). InnerVolumeSpecName "kube-api-access-j8qdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.035191 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8" (OuterVolumeSpecName: "kube-api-access-kzps8") pod "6c3b8de3-3277-44c9-b6d8-20f784938901" (UID: "6c3b8de3-3277-44c9-b6d8-20f784938901"). InnerVolumeSpecName "kube-api-access-kzps8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.041267 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities" (OuterVolumeSpecName: "utilities") pod "43899581-01e6-4eda-87cd-33f5b3d6879b" (UID: "43899581-01e6-4eda-87cd-33f5b3d6879b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.041818 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt" (OuterVolumeSpecName: "kube-api-access-xwklt") pod "43899581-01e6-4eda-87cd-33f5b3d6879b" (UID: "43899581-01e6-4eda-87cd-33f5b3d6879b"). InnerVolumeSpecName "kube-api-access-xwklt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.057709 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43899581-01e6-4eda-87cd-33f5b3d6879b" (UID: "43899581-01e6-4eda-87cd-33f5b3d6879b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.086359 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a20eff2d-16a6-4023-ae43-29d16ed9c041" (UID: "a20eff2d-16a6-4023-ae43-29d16ed9c041"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129012 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129067 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129082 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43899581-01e6-4eda-87cd-33f5b3d6879b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129091 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a20eff2d-16a6-4023-ae43-29d16ed9c041-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129101 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8qdd\" (UniqueName: \"kubernetes.io/projected/a20eff2d-16a6-4023-ae43-29d16ed9c041-kube-api-access-j8qdd\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129116 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwklt\" (UniqueName: \"kubernetes.io/projected/43899581-01e6-4eda-87cd-33f5b3d6879b-kube-api-access-xwklt\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129125 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzps8\" (UniqueName: \"kubernetes.io/projected/6c3b8de3-3277-44c9-b6d8-20f784938901-kube-api-access-kzps8\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.129136 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.141265 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.144608 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6pt7z"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.163068 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c3b8de3-3277-44c9-b6d8-20f784938901" (UID: "6c3b8de3-3277-44c9-b6d8-20f784938901"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.230067 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c3b8de3-3277-44c9-b6d8-20f784938901-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.822770 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pf89b"] Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823076 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823095 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823110 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823120 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823133 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823141 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823150 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823156 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823164 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823172 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823181 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823187 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823195 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823201 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823209 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 
15:32:59.823216 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823223 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823230 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823239 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823245 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823254 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823261 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="extract-content" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823271 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823277 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: E1003 15:32:59.823287 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823296 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="extract-utilities" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823389 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823397 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823406 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823417 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" containerName="registry-server" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.823429 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" containerName="marketplace-operator" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.824315 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.827801 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.838002 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31ab5383-1898-4964-8e8b-406b81b83fab" path="/var/lib/kubelet/pods/31ab5383-1898-4964-8e8b-406b81b83fab/volumes" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.838667 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fac9b5-842e-4913-8c2f-6cfb992f3e1d" path="/var/lib/kubelet/pods/c5fac9b5-842e-4913-8c2f-6cfb992f3e1d/volumes" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.839891 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pf89b"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.842915 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg69b" event={"ID":"43899581-01e6-4eda-87cd-33f5b3d6879b","Type":"ContainerDied","Data":"a27592e9d669ebdd06bbb5177d4de7e2e6e6e44cb438c072fabcb49f901c4ed7"} Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.842986 5081 scope.go:117] "RemoveContainer" containerID="c4e3dbd961ae262037229fd0208b39897bc87eb5182fcfd9d4e350b2854c078c" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.843148 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg69b" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.845077 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phpqk" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.846667 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b4b85" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.852088 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-chvtb" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.862408 5081 scope.go:117] "RemoveContainer" containerID="887322dadd1b91f6f3ee9e66f37e37912d9c0557d0254f315ebc894a6f37c77c" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.886434 5081 scope.go:117] "RemoveContainer" containerID="33a550ecbb68dd5d7a29385a58ffe4dd48e35fc96108974bbd5559503063f3f5" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.915782 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.925020 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg69b"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.941413 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66qn9\" (UniqueName: \"kubernetes.io/projected/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-kube-api-access-66qn9\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.941810 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-utilities\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.942065 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-catalog-content\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.942276 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-phpqk"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.946231 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-phpqk"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.960110 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:32:59 crc kubenswrapper[5081]: I1003 15:32:59.966730 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b4b85"] Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.043236 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66qn9\" (UniqueName: \"kubernetes.io/projected/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-kube-api-access-66qn9\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.043332 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-utilities\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.043394 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-catalog-content\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.043969 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-catalog-content\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.044415 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-utilities\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.071719 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66qn9\" (UniqueName: \"kubernetes.io/projected/bcbbdb6b-7c85-4ec3-87ab-45560ec82d96-kube-api-access-66qn9\") pod \"certified-operators-pf89b\" (UID: \"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96\") " pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.153688 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.393733 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pf89b"] Oct 03 15:33:00 crc kubenswrapper[5081]: W1003 15:33:00.407156 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcbbdb6b_7c85_4ec3_87ab_45560ec82d96.slice/crio-45bfc11a601411a2a10fb16ed189581e5da667296782bb5393e7fccbbfa05cd8 WatchSource:0}: Error finding container 45bfc11a601411a2a10fb16ed189581e5da667296782bb5393e7fccbbfa05cd8: Status 404 returned error can't find the container with id 45bfc11a601411a2a10fb16ed189581e5da667296782bb5393e7fccbbfa05cd8 Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.862384 5081 generic.go:334] "Generic (PLEG): container finished" podID="bcbbdb6b-7c85-4ec3-87ab-45560ec82d96" containerID="a5decc0098e288b0439fdc095684228f09d66756bdc4869e5152f61d1eb06187" exitCode=0 Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.862481 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pf89b" event={"ID":"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96","Type":"ContainerDied","Data":"a5decc0098e288b0439fdc095684228f09d66756bdc4869e5152f61d1eb06187"} Oct 03 15:33:00 crc kubenswrapper[5081]: I1003 15:33:00.862520 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pf89b" event={"ID":"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96","Type":"ContainerStarted","Data":"45bfc11a601411a2a10fb16ed189581e5da667296782bb5393e7fccbbfa05cd8"} Oct 03 15:33:01 crc kubenswrapper[5081]: I1003 15:33:01.836202 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43899581-01e6-4eda-87cd-33f5b3d6879b" path="/var/lib/kubelet/pods/43899581-01e6-4eda-87cd-33f5b3d6879b/volumes" Oct 03 15:33:01 crc kubenswrapper[5081]: I1003 15:33:01.837611 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c3b8de3-3277-44c9-b6d8-20f784938901" path="/var/lib/kubelet/pods/6c3b8de3-3277-44c9-b6d8-20f784938901/volumes" Oct 03 15:33:01 crc kubenswrapper[5081]: I1003 15:33:01.838322 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a20eff2d-16a6-4023-ae43-29d16ed9c041" path="/var/lib/kubelet/pods/a20eff2d-16a6-4023-ae43-29d16ed9c041/volumes" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.033383 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vd4dm"] Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.037458 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.044293 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vd4dm"] Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.044722 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.172502 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-utilities\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.172616 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-catalog-content\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.172687 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgq49\" (UniqueName: \"kubernetes.io/projected/2cd58fd3-9571-4d36-a37b-b6cf4337e792-kube-api-access-sgq49\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.228769 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9bwhw"] Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.231328 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.236806 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.243890 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9bwhw"] Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.274661 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-utilities\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.274723 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-catalog-content\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.274779 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgq49\" (UniqueName: \"kubernetes.io/projected/2cd58fd3-9571-4d36-a37b-b6cf4337e792-kube-api-access-sgq49\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.276829 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-utilities\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.278081 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd58fd3-9571-4d36-a37b-b6cf4337e792-catalog-content\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.297620 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgq49\" (UniqueName: \"kubernetes.io/projected/2cd58fd3-9571-4d36-a37b-b6cf4337e792-kube-api-access-sgq49\") pod \"redhat-operators-vd4dm\" (UID: \"2cd58fd3-9571-4d36-a37b-b6cf4337e792\") " pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.359911 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.376443 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-catalog-content\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.376535 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rqpl\" (UniqueName: \"kubernetes.io/projected/7d28813c-7855-40f7-bdbc-3b4541272950-kube-api-access-7rqpl\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.376648 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-utilities\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.477489 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-catalog-content\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.477971 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rqpl\" (UniqueName: \"kubernetes.io/projected/7d28813c-7855-40f7-bdbc-3b4541272950-kube-api-access-7rqpl\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.478139 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-utilities\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.478370 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-catalog-content\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.478778 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d28813c-7855-40f7-bdbc-3b4541272950-utilities\") pod \"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.512928 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rqpl\" (UniqueName: \"kubernetes.io/projected/7d28813c-7855-40f7-bdbc-3b4541272950-kube-api-access-7rqpl\") pod 
\"community-operators-9bwhw\" (UID: \"7d28813c-7855-40f7-bdbc-3b4541272950\") " pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.551476 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.748455 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9bwhw"] Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.779059 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vd4dm"] Oct 03 15:33:02 crc kubenswrapper[5081]: W1003 15:33:02.783181 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cd58fd3_9571_4d36_a37b_b6cf4337e792.slice/crio-84f1448e857cf044c4f17ebda65b2256dd17a42c5648a2fa068032f9b14f261d WatchSource:0}: Error finding container 84f1448e857cf044c4f17ebda65b2256dd17a42c5648a2fa068032f9b14f261d: Status 404 returned error can't find the container with id 84f1448e857cf044c4f17ebda65b2256dd17a42c5648a2fa068032f9b14f261d Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.874288 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd4dm" event={"ID":"2cd58fd3-9571-4d36-a37b-b6cf4337e792","Type":"ContainerStarted","Data":"84f1448e857cf044c4f17ebda65b2256dd17a42c5648a2fa068032f9b14f261d"} Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.876689 5081 generic.go:334] "Generic (PLEG): container finished" podID="bcbbdb6b-7c85-4ec3-87ab-45560ec82d96" containerID="214d3e8e591cbd0dd2e70d727a3b772b5aacaf28240ec27c3329dee55002a985" exitCode=0 Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.876772 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pf89b" event={"ID":"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96","Type":"ContainerDied","Data":"214d3e8e591cbd0dd2e70d727a3b772b5aacaf28240ec27c3329dee55002a985"} Oct 03 15:33:02 crc kubenswrapper[5081]: I1003 15:33:02.880045 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9bwhw" event={"ID":"7d28813c-7855-40f7-bdbc-3b4541272950","Type":"ContainerStarted","Data":"8bcaa30131cf766d9c087e7740056527cc0f14918c6651fb7e2951adab26b311"} Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.885886 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pf89b" event={"ID":"bcbbdb6b-7c85-4ec3-87ab-45560ec82d96","Type":"ContainerStarted","Data":"f10af2b74e8735e7d178eab36b7f246c42a8d0500fec52059111ca711e249bba"} Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.888518 5081 generic.go:334] "Generic (PLEG): container finished" podID="2cd58fd3-9571-4d36-a37b-b6cf4337e792" containerID="7d95054fd6628d0d7ec71f0f6adfb897e6417c6dfdfa0404d76fc3239edeb345" exitCode=0 Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.888577 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd4dm" event={"ID":"2cd58fd3-9571-4d36-a37b-b6cf4337e792","Type":"ContainerDied","Data":"7d95054fd6628d0d7ec71f0f6adfb897e6417c6dfdfa0404d76fc3239edeb345"} Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.889889 5081 generic.go:334] "Generic (PLEG): container finished" podID="7d28813c-7855-40f7-bdbc-3b4541272950" 
containerID="81090a2d8cd644e40669f7a6fa80895a9476910056be551aa5f6fe2091daa01c" exitCode=0 Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.890172 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9bwhw" event={"ID":"7d28813c-7855-40f7-bdbc-3b4541272950","Type":"ContainerDied","Data":"81090a2d8cd644e40669f7a6fa80895a9476910056be551aa5f6fe2091daa01c"} Oct 03 15:33:03 crc kubenswrapper[5081]: I1003 15:33:03.910833 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pf89b" podStartSLOduration=2.490124365 podStartE2EDuration="4.91080679s" podCreationTimestamp="2025-10-03 15:32:59 +0000 UTC" firstStartedPulling="2025-10-03 15:33:00.865350514 +0000 UTC m=+299.830907137" lastFinishedPulling="2025-10-03 15:33:03.286032949 +0000 UTC m=+302.251589562" observedRunningTime="2025-10-03 15:33:03.906892538 +0000 UTC m=+302.872449151" watchObservedRunningTime="2025-10-03 15:33:03.91080679 +0000 UTC m=+302.876363413" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.426576 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vqq89"] Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.427727 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.432277 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.446504 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vqq89"] Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.612184 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnxgb\" (UniqueName: \"kubernetes.io/projected/fabdc78f-4187-4f36-82a3-ac9d05990b5a-kube-api-access-mnxgb\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.612302 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-utilities\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.612359 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-catalog-content\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.713884 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-utilities\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.713970 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-catalog-content\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.714029 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnxgb\" (UniqueName: \"kubernetes.io/projected/fabdc78f-4187-4f36-82a3-ac9d05990b5a-kube-api-access-mnxgb\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.715339 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-catalog-content\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.715380 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabdc78f-4187-4f36-82a3-ac9d05990b5a-utilities\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.745206 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnxgb\" (UniqueName: \"kubernetes.io/projected/fabdc78f-4187-4f36-82a3-ac9d05990b5a-kube-api-access-mnxgb\") pod \"redhat-marketplace-vqq89\" (UID: \"fabdc78f-4187-4f36-82a3-ac9d05990b5a\") " pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.748187 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.920858 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd4dm" event={"ID":"2cd58fd3-9571-4d36-a37b-b6cf4337e792","Type":"ContainerStarted","Data":"b0c1dcb0b5a2af93d79ddd1c2a49e22aa128e53983ae98980f1d366ac5c16e83"} Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.924337 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9bwhw" event={"ID":"7d28813c-7855-40f7-bdbc-3b4541272950","Type":"ContainerStarted","Data":"6ba807478fd4bfe2002873c85bc9ffe747ca58083a0c7b26ed657ed5b4ce3003"} Oct 03 15:33:04 crc kubenswrapper[5081]: I1003 15:33:04.980746 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vqq89"] Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.939514 5081 generic.go:334] "Generic (PLEG): container finished" podID="fabdc78f-4187-4f36-82a3-ac9d05990b5a" containerID="5b6f630230eaba90be74228fa81d0cdbc663a971e600793ae8a0223e52e9f5e9" exitCode=0 Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.939995 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vqq89" event={"ID":"fabdc78f-4187-4f36-82a3-ac9d05990b5a","Type":"ContainerDied","Data":"5b6f630230eaba90be74228fa81d0cdbc663a971e600793ae8a0223e52e9f5e9"} Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.940033 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vqq89" event={"ID":"fabdc78f-4187-4f36-82a3-ac9d05990b5a","Type":"ContainerStarted","Data":"ab3ef433f20bbbf334f52f9a0b929e3894cd33a9c8e13e2704f1e7bf4bb0aab7"} Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.946145 5081 generic.go:334] "Generic (PLEG): container finished" podID="2cd58fd3-9571-4d36-a37b-b6cf4337e792" containerID="b0c1dcb0b5a2af93d79ddd1c2a49e22aa128e53983ae98980f1d366ac5c16e83" exitCode=0 Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.946205 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd4dm" event={"ID":"2cd58fd3-9571-4d36-a37b-b6cf4337e792","Type":"ContainerDied","Data":"b0c1dcb0b5a2af93d79ddd1c2a49e22aa128e53983ae98980f1d366ac5c16e83"} Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.951621 5081 generic.go:334] "Generic (PLEG): container finished" podID="7d28813c-7855-40f7-bdbc-3b4541272950" containerID="6ba807478fd4bfe2002873c85bc9ffe747ca58083a0c7b26ed657ed5b4ce3003" exitCode=0 Oct 03 15:33:05 crc kubenswrapper[5081]: I1003 15:33:05.952000 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9bwhw" event={"ID":"7d28813c-7855-40f7-bdbc-3b4541272950","Type":"ContainerDied","Data":"6ba807478fd4bfe2002873c85bc9ffe747ca58083a0c7b26ed657ed5b4ce3003"} Oct 03 15:33:06 crc kubenswrapper[5081]: I1003 15:33:06.963808 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd4dm" event={"ID":"2cd58fd3-9571-4d36-a37b-b6cf4337e792","Type":"ContainerStarted","Data":"d5ca568263d2683a497b505eb92e07af79827f5cf66ff6f7954480e39d3f332e"} Oct 03 15:33:06 crc kubenswrapper[5081]: I1003 15:33:06.969395 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9bwhw" 
event={"ID":"7d28813c-7855-40f7-bdbc-3b4541272950","Type":"ContainerStarted","Data":"8acf32463414850d3ec17b2a4400f00abfd2b845b763d92b3a6cdc9ca662e45f"} Oct 03 15:33:06 crc kubenswrapper[5081]: I1003 15:33:06.993015 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vd4dm" podStartSLOduration=2.350816664 podStartE2EDuration="4.992983138s" podCreationTimestamp="2025-10-03 15:33:02 +0000 UTC" firstStartedPulling="2025-10-03 15:33:03.890639202 +0000 UTC m=+302.856195815" lastFinishedPulling="2025-10-03 15:33:06.532805666 +0000 UTC m=+305.498362289" observedRunningTime="2025-10-03 15:33:06.985621697 +0000 UTC m=+305.951178340" watchObservedRunningTime="2025-10-03 15:33:06.992983138 +0000 UTC m=+305.958539751" Oct 03 15:33:07 crc kubenswrapper[5081]: I1003 15:33:07.007838 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9bwhw" podStartSLOduration=2.2988769749999998 podStartE2EDuration="5.007814313s" podCreationTimestamp="2025-10-03 15:33:02 +0000 UTC" firstStartedPulling="2025-10-03 15:33:03.891150967 +0000 UTC m=+302.856707570" lastFinishedPulling="2025-10-03 15:33:06.600088305 +0000 UTC m=+305.565644908" observedRunningTime="2025-10-03 15:33:07.004589841 +0000 UTC m=+305.970146464" watchObservedRunningTime="2025-10-03 15:33:07.007814313 +0000 UTC m=+305.973370926" Oct 03 15:33:07 crc kubenswrapper[5081]: I1003 15:33:07.977499 5081 generic.go:334] "Generic (PLEG): container finished" podID="fabdc78f-4187-4f36-82a3-ac9d05990b5a" containerID="b7db0bdc6e482457eec36a405f06e194da7376619e0eeaa4f664c7a001d1f014" exitCode=0 Oct 03 15:33:07 crc kubenswrapper[5081]: I1003 15:33:07.977610 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vqq89" event={"ID":"fabdc78f-4187-4f36-82a3-ac9d05990b5a","Type":"ContainerDied","Data":"b7db0bdc6e482457eec36a405f06e194da7376619e0eeaa4f664c7a001d1f014"} Oct 03 15:33:08 crc kubenswrapper[5081]: I1003 15:33:08.987280 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vqq89" event={"ID":"fabdc78f-4187-4f36-82a3-ac9d05990b5a","Type":"ContainerStarted","Data":"a04a8d3cbeff14c5c1cc3577e2075af8efeddcd4a5f6c5e7d038870a701b5383"} Oct 03 15:33:10 crc kubenswrapper[5081]: I1003 15:33:10.154702 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:10 crc kubenswrapper[5081]: I1003 15:33:10.154767 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:10 crc kubenswrapper[5081]: I1003 15:33:10.199852 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:10 crc kubenswrapper[5081]: I1003 15:33:10.221191 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vqq89" podStartSLOduration=3.585483973 podStartE2EDuration="6.221170462s" podCreationTimestamp="2025-10-03 15:33:04 +0000 UTC" firstStartedPulling="2025-10-03 15:33:05.943061229 +0000 UTC m=+304.908617832" lastFinishedPulling="2025-10-03 15:33:08.578747708 +0000 UTC m=+307.544304321" observedRunningTime="2025-10-03 15:33:09.005987466 +0000 UTC m=+307.971544089" watchObservedRunningTime="2025-10-03 15:33:10.221170462 +0000 UTC m=+309.186727075" Oct 03 15:33:11 crc 
kubenswrapper[5081]: I1003 15:33:11.044549 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pf89b" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.368034 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.368138 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.422942 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.552305 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.552380 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:12 crc kubenswrapper[5081]: I1003 15:33:12.594419 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:13 crc kubenswrapper[5081]: I1003 15:33:13.057098 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vd4dm" Oct 03 15:33:13 crc kubenswrapper[5081]: I1003 15:33:13.057678 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9bwhw" Oct 03 15:33:14 crc kubenswrapper[5081]: I1003 15:33:14.749333 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:14 crc kubenswrapper[5081]: I1003 15:33:14.749783 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:14 crc kubenswrapper[5081]: I1003 15:33:14.790548 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:15 crc kubenswrapper[5081]: I1003 15:33:15.085437 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vqq89" Oct 03 15:33:29 crc kubenswrapper[5081]: I1003 15:33:29.871310 5081 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","poda20eff2d-16a6-4023-ae43-29d16ed9c041"] err="unable to destroy cgroup paths for cgroup [kubepods burstable poda20eff2d-16a6-4023-ae43-29d16ed9c041] : Timed out while waiting for systemd to remove kubepods-burstable-poda20eff2d_16a6_4023_ae43_29d16ed9c041.slice" Oct 03 15:34:00 crc kubenswrapper[5081]: I1003 15:34:00.648192 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:34:00 crc kubenswrapper[5081]: I1003 15:34:00.649082 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:34:30 crc kubenswrapper[5081]: I1003 15:34:30.647383 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:34:30 crc kubenswrapper[5081]: I1003 15:34:30.648241 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:35:00 crc kubenswrapper[5081]: I1003 15:35:00.647956 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:35:00 crc kubenswrapper[5081]: I1003 15:35:00.648913 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:35:00 crc kubenswrapper[5081]: I1003 15:35:00.648979 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:35:00 crc kubenswrapper[5081]: I1003 15:35:00.649758 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:35:00 crc kubenswrapper[5081]: I1003 15:35:00.649828 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8" gracePeriod=600 Oct 03 15:35:01 crc kubenswrapper[5081]: I1003 15:35:01.726296 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8" exitCode=0 Oct 03 15:35:01 crc kubenswrapper[5081]: I1003 15:35:01.726402 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8"} Oct 03 15:35:01 crc kubenswrapper[5081]: I1003 15:35:01.726919 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd"} Oct 03 
15:35:01 crc kubenswrapper[5081]: I1003 15:35:01.726963 5081 scope.go:117] "RemoveContainer" containerID="60f045bc4932bbe8381381ca282330a2171010d84827b918438d3bd8aff1050f" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.150778 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjg9m"] Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.152925 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.163762 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjg9m"] Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268613 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-trusted-ca\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268684 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-registry-certificates\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268723 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/615466f8-68c2-462e-b98e-68b393821a3f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268771 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-bound-sa-token\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268792 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/615466f8-68c2-462e-b98e-68b393821a3f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268810 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6zk5\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-kube-api-access-j6zk5\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268919 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.268951 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-registry-tls\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.303738 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.369865 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-trusted-ca\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.369913 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-registry-certificates\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.369942 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/615466f8-68c2-462e-b98e-68b393821a3f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.369983 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-bound-sa-token\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.370003 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/615466f8-68c2-462e-b98e-68b393821a3f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.370025 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6zk5\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-kube-api-access-j6zk5\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: 
\"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.370058 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-registry-tls\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.371660 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-trusted-ca\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.373316 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/615466f8-68c2-462e-b98e-68b393821a3f-registry-certificates\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.374677 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/615466f8-68c2-462e-b98e-68b393821a3f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.376208 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/615466f8-68c2-462e-b98e-68b393821a3f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.376362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-registry-tls\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.391470 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6zk5\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-kube-api-access-j6zk5\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.399043 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/615466f8-68c2-462e-b98e-68b393821a3f-bound-sa-token\") pod \"image-registry-66df7c8f76-bjg9m\" (UID: \"615466f8-68c2-462e-b98e-68b393821a3f\") " pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.490070 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:12 crc kubenswrapper[5081]: I1003 15:36:12.699218 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bjg9m"] Oct 03 15:36:13 crc kubenswrapper[5081]: I1003 15:36:13.167411 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" event={"ID":"615466f8-68c2-462e-b98e-68b393821a3f","Type":"ContainerStarted","Data":"4dfd525fe13c59a4d95c68b4a905a82c48f8c52cd86af3922a4203b1ec8bb171"} Oct 03 15:36:13 crc kubenswrapper[5081]: I1003 15:36:13.167510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" event={"ID":"615466f8-68c2-462e-b98e-68b393821a3f","Type":"ContainerStarted","Data":"2cd1212a984741555afdf9ca53a473d2abcf401b6ad786ff6f6b86b66f137d9e"} Oct 03 15:36:13 crc kubenswrapper[5081]: I1003 15:36:13.168829 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:13 crc kubenswrapper[5081]: I1003 15:36:13.196168 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" podStartSLOduration=1.19613904 podStartE2EDuration="1.19613904s" podCreationTimestamp="2025-10-03 15:36:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:36:13.193061613 +0000 UTC m=+492.158618226" watchObservedRunningTime="2025-10-03 15:36:13.19613904 +0000 UTC m=+492.161695643" Oct 03 15:36:32 crc kubenswrapper[5081]: I1003 15:36:32.498644 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bjg9m" Oct 03 15:36:32 crc kubenswrapper[5081]: I1003 15:36:32.543604 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"] Oct 03 15:36:57 crc kubenswrapper[5081]: I1003 15:36:57.594320 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" podUID="86928421-ee7e-4823-9483-80d3d4855283" containerName="registry" containerID="cri-o://81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619" gracePeriod=30 Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.005353 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.083903 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.083966 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whzhq\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084237 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084286 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084341 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084386 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084413 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.084450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls\") pod \"86928421-ee7e-4823-9483-80d3d4855283\" (UID: \"86928421-ee7e-4823-9483-80d3d4855283\") " Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.085309 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.086117 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.092848 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.093208 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.092883 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.093646 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq" (OuterVolumeSpecName: "kube-api-access-whzhq") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "kube-api-access-whzhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.099323 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.106939 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "86928421-ee7e-4823-9483-80d3d4855283" (UID: "86928421-ee7e-4823-9483-80d3d4855283"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186231 5081 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186288 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whzhq\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-kube-api-access-whzhq\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186303 5081 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186316 5081 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/86928421-ee7e-4823-9483-80d3d4855283-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186332 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/86928421-ee7e-4823-9483-80d3d4855283-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186345 5081 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/86928421-ee7e-4823-9483-80d3d4855283-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.186356 5081 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/86928421-ee7e-4823-9483-80d3d4855283-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.467253 5081 generic.go:334] "Generic (PLEG): container finished" podID="86928421-ee7e-4823-9483-80d3d4855283" containerID="81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619" exitCode=0 Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.467327 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" event={"ID":"86928421-ee7e-4823-9483-80d3d4855283","Type":"ContainerDied","Data":"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619"} Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.467366 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" event={"ID":"86928421-ee7e-4823-9483-80d3d4855283","Type":"ContainerDied","Data":"decf891f83bb3a297b4367d1816602bd57122541cc4be181ccf5af1c21988167"} Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.467392 5081 scope.go:117] "RemoveContainer" containerID="81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.467620 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vk4jz" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.506256 5081 scope.go:117] "RemoveContainer" containerID="81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619" Oct 03 15:36:58 crc kubenswrapper[5081]: E1003 15:36:58.507103 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619\": container with ID starting with 81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619 not found: ID does not exist" containerID="81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.507171 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619"} err="failed to get container status \"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619\": rpc error: code = NotFound desc = could not find container \"81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619\": container with ID starting with 81cdeb7295acc7e9b80e5601bd69021c8052e00cffdf0e0365ca28dc2661f619 not found: ID does not exist" Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.508539 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"] Oct 03 15:36:58 crc kubenswrapper[5081]: I1003 15:36:58.512047 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vk4jz"] Oct 03 15:36:59 crc kubenswrapper[5081]: I1003 15:36:59.836285 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86928421-ee7e-4823-9483-80d3d4855283" path="/var/lib/kubelet/pods/86928421-ee7e-4823-9483-80d3d4855283/volumes" Oct 03 15:37:00 crc kubenswrapper[5081]: I1003 15:37:00.647861 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:37:00 crc kubenswrapper[5081]: I1003 15:37:00.648429 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:37:01 crc kubenswrapper[5081]: I1003 15:37:01.990721 5081 scope.go:117] "RemoveContainer" containerID="20df0b8d991fba32685a3237eb68af4ce0c627ee63a239e2e1f93bc2bcda106d" Oct 03 15:37:02 crc kubenswrapper[5081]: I1003 15:37:02.009982 5081 scope.go:117] "RemoveContainer" containerID="9161623a6b2b106df6b3b9a6270401910d6e30631331b0f7b11e833d4a2c6c4a" Oct 03 15:37:30 crc kubenswrapper[5081]: I1003 15:37:30.648313 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:37:30 crc kubenswrapper[5081]: I1003 15:37:30.649617 5081 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.647928 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.648864 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.648935 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.649910 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.649991 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd" gracePeriod=600 Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.910998 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd" exitCode=0 Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.911077 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd"} Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.911789 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9"} Oct 03 15:38:00 crc kubenswrapper[5081]: I1003 15:38:00.911868 5081 scope.go:117] "RemoveContainer" containerID="682928bd6d83657f2137a748c80e0c603574009da0edd7028e9f7d2287a58ef8" Oct 03 15:38:02 crc kubenswrapper[5081]: I1003 15:38:02.067819 5081 scope.go:117] "RemoveContainer" containerID="4a4bc66739de75440831ebd6a53422c1af34a0d2a954ca949e02b4aab1880f7e" Oct 03 15:38:02 crc kubenswrapper[5081]: I1003 15:38:02.092601 5081 scope.go:117] "RemoveContainer" containerID="d5b15f555ab3d75d3f93e7143dfd24cd43fbc4bd189e64dbadfca5838ccd1c8b" Oct 03 15:38:02 crc 
kubenswrapper[5081]: I1003 15:38:02.129845 5081 scope.go:117] "RemoveContainer" containerID="cd1bc455f34051d89197d11cc86960120897326031562230af13318437f849de" Oct 03 15:38:02 crc kubenswrapper[5081]: I1003 15:38:02.154932 5081 scope.go:117] "RemoveContainer" containerID="34d7df48f18635f90616566908f9e36bbcad5e7fb390b5ebd840d588cf2fe832" Oct 03 15:40:00 crc kubenswrapper[5081]: I1003 15:40:00.648094 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:40:00 crc kubenswrapper[5081]: I1003 15:40:00.648810 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.822834 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5bxx6"] Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824035 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-controller" containerID="cri-o://3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824104 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="nbdb" containerID="cri-o://8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824197 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824249 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="northd" containerID="cri-o://9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824243 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-acl-logging" containerID="cri-o://a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824230 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="sbdb" containerID="cri-o://6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.824324 5081 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-node" containerID="cri-o://11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" gracePeriod=30 Oct 03 15:40:12 crc kubenswrapper[5081]: I1003 15:40:12.863874 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" containerID="cri-o://61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" gracePeriod=30 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.171838 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.174348 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovn-acl-logging/0.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.174984 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovn-controller/0.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.175680 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233342 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rq5c9"] Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233610 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233626 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233752 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233763 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233775 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233785 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233798 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233807 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233818 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="nbdb" Oct 03 15:40:13 crc 
kubenswrapper[5081]: I1003 15:40:13.233825 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="nbdb" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233835 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233845 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233854 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86928421-ee7e-4823-9483-80d3d4855283" containerName="registry" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233862 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="86928421-ee7e-4823-9483-80d3d4855283" containerName="registry" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233872 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="sbdb" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233879 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="sbdb" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233889 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kubecfg-setup" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233896 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kubecfg-setup" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233908 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233915 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233928 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-node" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233935 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-node" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233943 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="northd" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233948 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="northd" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233959 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-acl-logging" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.233964 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-acl-logging" Oct 03 15:40:13 crc kubenswrapper[5081]: E1003 15:40:13.233971 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 
15:40:13.233977 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234071 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234082 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234090 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234096 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovn-acl-logging" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234105 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="nbdb" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234112 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234120 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="sbdb" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234128 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-node" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234149 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="northd" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234157 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234164 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234172 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="86928421-ee7e-4823-9483-80d3d4855283" containerName="registry" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.234535 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerName="ovnkube-controller" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.251385 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362798 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rphc2\" (UniqueName: \"kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362868 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362896 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362917 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362940 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362959 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.362985 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363013 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363001 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363043 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363068 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363080 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363109 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363138 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363163 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363186 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363206 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363266 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363243 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet" (OuterVolumeSpecName: "host-kubelet") 
pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363302 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363379 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log" (OuterVolumeSpecName: "node-log") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363424 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363448 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363446 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363454 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363468 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363524 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363544 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket" (OuterVolumeSpecName: "log-socket") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363601 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363612 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e63642c7-8d80-4615-94d9-91d4c41421cc\" (UID: \"e63642c7-8d80-4615-94d9-91d4c41421cc\") " Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363626 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash" (OuterVolumeSpecName: "host-slash") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363650 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363675 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363698 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363809 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363919 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363944 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.363975 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-node-log\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364022 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-netns\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364077 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-var-lib-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364140 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-kubelet\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364147 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364301 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-ovn\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364332 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-script-lib\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364402 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-log-socket\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364444 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-systemd-units\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364628 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5745z\" (UniqueName: \"kubernetes.io/projected/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-kube-api-access-5745z\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364712 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-config\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364742 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-env-overrides\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364767 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovn-node-metrics-cert\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364796 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-bin\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364853 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-netd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364873 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-systemd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364939 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-slash\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.364964 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-etc-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365106 5081 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365121 5081 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365135 5081 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365147 5081 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" 
(UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365160 5081 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365173 5081 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365186 5081 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-log-socket\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365199 5081 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-slash\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365214 5081 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-node-log\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365228 5081 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365241 5081 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365254 5081 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365267 5081 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365281 5081 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e63642c7-8d80-4615-94d9-91d4c41421cc-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365295 5081 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365308 5081 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.365322 5081 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.369717 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.369876 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2" (OuterVolumeSpecName: "kube-api-access-rphc2") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "kube-api-access-rphc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.378508 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e63642c7-8d80-4615-94d9-91d4c41421cc" (UID: "e63642c7-8d80-4615-94d9-91d4c41421cc"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466805 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-ovn\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466867 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-script-lib\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466904 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-log-socket\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466925 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466962 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466992 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-log-socket\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467053 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-systemd-units\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466998 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-systemd-units\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467088 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.466958 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-ovn\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467125 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5745z\" (UniqueName: \"kubernetes.io/projected/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-kube-api-access-5745z\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467171 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-config\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467193 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-env-overrides\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467216 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovn-node-metrics-cert\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467243 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-bin\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467268 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-netd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467289 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-systemd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467318 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-slash\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467345 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-etc-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467379 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467401 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-node-log\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467420 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-netns\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467062 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-ovn-kubernetes\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467447 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-var-lib-openvswitch\") pod 
\"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467478 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-var-lib-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467516 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-kubelet\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467652 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rphc2\" (UniqueName: \"kubernetes.io/projected/e63642c7-8d80-4615-94d9-91d4c41421cc-kube-api-access-rphc2\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467674 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e63642c7-8d80-4615-94d9-91d4c41421cc-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467686 5081 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e63642c7-8d80-4615-94d9-91d4c41421cc-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-kubelet\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467771 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-systemd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467943 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-bin\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468016 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-slash\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468031 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-node-log\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.467949 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-script-lib\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468066 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-run-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468115 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-cni-netd\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468114 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-etc-openvswitch\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468150 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-host-run-netns\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468326 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovnkube-config\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.468375 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-env-overrides\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.474739 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-ovn-node-metrics-cert\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.497332 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5745z\" (UniqueName: \"kubernetes.io/projected/16eeca1f-850d-4aaa-9d18-eeb5ccb155af-kube-api-access-5745z\") pod \"ovnkube-node-rq5c9\" (UID: \"16eeca1f-850d-4aaa-9d18-eeb5ccb155af\") " pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.573234 5081 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.790486 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"143c7cceb8f82ec62e1b2e51728a2d684ce8c2e39a3aa9887c142fa85fe515e2"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.791006 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"a33ec93040a2c7847a00d1e32f9b9f7ffaf64d579cd9467cf98bdce8aad3d049"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.792516 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovnkube-controller/3.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.794521 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovn-acl-logging/0.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.794997 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5bxx6_e63642c7-8d80-4615-94d9-91d4c41421cc/ovn-controller/0.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795322 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795352 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795360 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795368 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795378 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795386 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" exitCode=0 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795393 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" exitCode=143 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795400 5081 generic.go:334] "Generic (PLEG): container finished" podID="e63642c7-8d80-4615-94d9-91d4c41421cc" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" exitCode=143 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795434 
5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795456 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795467 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795491 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795500 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795511 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795522 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795530 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795541 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795553 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795598 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795606 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795613 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795618 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795628 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795639 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795647 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795656 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795669 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795677 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795683 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795691 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795698 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795705 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795711 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795722 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795734 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795743 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795750 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795759 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795767 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795775 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795782 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795788 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795795 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795801 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795811 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" event={"ID":"e63642c7-8d80-4615-94d9-91d4c41421cc","Type":"ContainerDied","Data":"94290c03451ed4d558da2921c984ebe6c69cbda00566a01a100c670d309d276c"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795822 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795830 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 
15:40:13.795836 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795845 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795852 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795859 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795864 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795871 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795876 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795881 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.795898 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.796102 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.799532 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/2.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.800348 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.800390 5081 generic.go:334] "Generic (PLEG): container finished" podID="af6b6616-1e4c-4618-890b-7eb334b8c339" containerID="ad25add9e1a27cf97894e382b0a37902bcef22aaa3f43e28a432ee4577a42d31" exitCode=2 Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.800417 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerDied","Data":"ad25add9e1a27cf97894e382b0a37902bcef22aaa3f43e28a432ee4577a42d31"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.800437 5081 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e"} Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.801470 5081 scope.go:117] "RemoveContainer" containerID="ad25add9e1a27cf97894e382b0a37902bcef22aaa3f43e28a432ee4577a42d31" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.876357 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.900424 5081 scope.go:117] "RemoveContainer" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.926485 5081 scope.go:117] "RemoveContainer" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.942576 5081 scope.go:117] "RemoveContainer" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.980801 5081 scope.go:117] "RemoveContainer" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:13 crc kubenswrapper[5081]: I1003 15:40:13.995287 5081 scope.go:117] "RemoveContainer" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.021811 5081 scope.go:117] "RemoveContainer" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.052579 5081 scope.go:117] "RemoveContainer" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.099650 5081 scope.go:117] "RemoveContainer" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.117253 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.117892 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID 
starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.117961 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} err="failed to get container status \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.118000 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.118440 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": container with ID starting with dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f not found: ID does not exist" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.118499 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} err="failed to get container status \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": rpc error: code = NotFound desc = could not find container \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": container with ID starting with dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.118533 5081 scope.go:117] "RemoveContainer" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.118901 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": container with ID starting with 6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5 not found: ID does not exist" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.118964 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} err="failed to get container status \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": rpc error: code = NotFound desc = could not find container \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": container with ID starting with 6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.118993 5081 scope.go:117] "RemoveContainer" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.119222 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": container with ID starting with 8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d not found: ID does not exist" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119253 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} err="failed to get container status \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": rpc error: code = NotFound desc = could not find container \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": container with ID starting with 8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119269 5081 scope.go:117] "RemoveContainer" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.119499 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": container with ID starting with 9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae not found: ID does not exist" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119524 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} err="failed to get container status \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": rpc error: code = NotFound desc = could not find container \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": container with ID starting with 9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119541 5081 scope.go:117] "RemoveContainer" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.119886 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": container with ID starting with 72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d not found: ID does not exist" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119920 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} err="failed to get container status \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": rpc error: code = NotFound desc = could not find container \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": container with ID starting with 72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.119939 5081 scope.go:117] "RemoveContainer" 
containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.120203 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": container with ID starting with 11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5 not found: ID does not exist" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120239 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} err="failed to get container status \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": rpc error: code = NotFound desc = could not find container \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": container with ID starting with 11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120264 5081 scope.go:117] "RemoveContainer" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.120575 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": container with ID starting with a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0 not found: ID does not exist" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120607 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} err="failed to get container status \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": rpc error: code = NotFound desc = could not find container \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": container with ID starting with a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120633 5081 scope.go:117] "RemoveContainer" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.120895 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": container with ID starting with 3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4 not found: ID does not exist" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120923 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} err="failed to get container status \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": rpc error: code = NotFound desc = could not find container \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": container with ID starting with 
3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.120941 5081 scope.go:117] "RemoveContainer" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: E1003 15:40:14.121315 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": container with ID starting with 093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2 not found: ID does not exist" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.121339 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} err="failed to get container status \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": rpc error: code = NotFound desc = could not find container \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": container with ID starting with 093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.121356 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.121887 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} err="failed to get container status \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.121916 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.122299 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} err="failed to get container status \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": rpc error: code = NotFound desc = could not find container \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": container with ID starting with dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.122324 5081 scope.go:117] "RemoveContainer" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.122594 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} err="failed to get container status \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": rpc error: code = NotFound desc = could not find container \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": container with ID starting with 
6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.122623 5081 scope.go:117] "RemoveContainer" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123024 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} err="failed to get container status \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": rpc error: code = NotFound desc = could not find container \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": container with ID starting with 8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123047 5081 scope.go:117] "RemoveContainer" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123280 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} err="failed to get container status \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": rpc error: code = NotFound desc = could not find container \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": container with ID starting with 9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123301 5081 scope.go:117] "RemoveContainer" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123512 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} err="failed to get container status \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": rpc error: code = NotFound desc = could not find container \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": container with ID starting with 72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123533 5081 scope.go:117] "RemoveContainer" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123797 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} err="failed to get container status \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": rpc error: code = NotFound desc = could not find container \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": container with ID starting with 11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.123833 5081 scope.go:117] "RemoveContainer" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.124210 5081 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} err="failed to get container status \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": rpc error: code = NotFound desc = could not find container \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": container with ID starting with a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.124237 5081 scope.go:117] "RemoveContainer" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.124628 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} err="failed to get container status \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": rpc error: code = NotFound desc = could not find container \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": container with ID starting with 3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.124664 5081 scope.go:117] "RemoveContainer" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125045 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} err="failed to get container status \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": rpc error: code = NotFound desc = could not find container \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": container with ID starting with 093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125071 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125435 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} err="failed to get container status \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125469 5081 scope.go:117] "RemoveContainer" containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125935 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} err="failed to get container status \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": rpc error: code = NotFound desc = could not find container \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": container with ID starting with dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f not found: ID does not exist" Oct 
03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.125989 5081 scope.go:117] "RemoveContainer" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.126347 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} err="failed to get container status \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": rpc error: code = NotFound desc = could not find container \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": container with ID starting with 6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.126372 5081 scope.go:117] "RemoveContainer" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.126741 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} err="failed to get container status \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": rpc error: code = NotFound desc = could not find container \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": container with ID starting with 8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.126774 5081 scope.go:117] "RemoveContainer" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127063 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} err="failed to get container status \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": rpc error: code = NotFound desc = could not find container \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": container with ID starting with 9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127089 5081 scope.go:117] "RemoveContainer" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127407 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} err="failed to get container status \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": rpc error: code = NotFound desc = could not find container \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": container with ID starting with 72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127441 5081 scope.go:117] "RemoveContainer" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127816 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} err="failed to get container status 
\"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": rpc error: code = NotFound desc = could not find container \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": container with ID starting with 11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.127875 5081 scope.go:117] "RemoveContainer" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.128318 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} err="failed to get container status \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": rpc error: code = NotFound desc = could not find container \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": container with ID starting with a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.128345 5081 scope.go:117] "RemoveContainer" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.128699 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} err="failed to get container status \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": rpc error: code = NotFound desc = could not find container \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": container with ID starting with 3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.128745 5081 scope.go:117] "RemoveContainer" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129024 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} err="failed to get container status \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": rpc error: code = NotFound desc = could not find container \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": container with ID starting with 093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129057 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129400 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} err="failed to get container status \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129426 5081 scope.go:117] "RemoveContainer" 
containerID="dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129773 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f"} err="failed to get container status \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": rpc error: code = NotFound desc = could not find container \"dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f\": container with ID starting with dbdf4d44285e8809d7a09869ebf82e5e413b03a3e06d2cbbebf3cf4cc84c7b3f not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.129798 5081 scope.go:117] "RemoveContainer" containerID="6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.130230 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5"} err="failed to get container status \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": rpc error: code = NotFound desc = could not find container \"6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5\": container with ID starting with 6b8aaedba1c06a6ff9b9c7d9906b2c348ce0b8d0f70074d046eed28d048582d5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.130262 5081 scope.go:117] "RemoveContainer" containerID="8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.130580 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d"} err="failed to get container status \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": rpc error: code = NotFound desc = could not find container \"8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d\": container with ID starting with 8b3bca2ae8ceb6d5b270e200fe5bf42f78f252bde94c73c5f9e60492ac04674d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.130608 5081 scope.go:117] "RemoveContainer" containerID="9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.130963 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae"} err="failed to get container status \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": rpc error: code = NotFound desc = could not find container \"9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae\": container with ID starting with 9cc429101644ab996ad0945ec06cab0dc446994acb1810c1266e37f021dfeeae not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131006 5081 scope.go:117] "RemoveContainer" containerID="72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131324 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d"} err="failed to get container status \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": rpc error: code = NotFound desc = could not find 
container \"72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d\": container with ID starting with 72a547bc9904716cc740294d058e0bf45daf7254a5bf56fc45a2d4671646452d not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131353 5081 scope.go:117] "RemoveContainer" containerID="11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131635 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5"} err="failed to get container status \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": rpc error: code = NotFound desc = could not find container \"11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5\": container with ID starting with 11a12ab835368059a9229b832c5aeb6120511923f992293f2bf2cb9045adc7c5 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131669 5081 scope.go:117] "RemoveContainer" containerID="a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131897 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0"} err="failed to get container status \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": rpc error: code = NotFound desc = could not find container \"a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0\": container with ID starting with a4392898e1cbd35c7424b16ce027047a4131bff2ad48b6e63fabe9bd2c3e46e0 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.131926 5081 scope.go:117] "RemoveContainer" containerID="3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.132188 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4"} err="failed to get container status \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": rpc error: code = NotFound desc = could not find container \"3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4\": container with ID starting with 3070cfe02c586110616e493a1c5d3da96a5c51f47b564072e88fe8d04d3ce9b4 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.132219 5081 scope.go:117] "RemoveContainer" containerID="093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.132469 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2"} err="failed to get container status \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": rpc error: code = NotFound desc = could not find container \"093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2\": container with ID starting with 093caa95a50322d810d35c71c440396870b44fca68cdfa298aea92dd8e104fa2 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.132497 5081 scope.go:117] "RemoveContainer" containerID="61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.132742 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9"} err="failed to get container status \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": rpc error: code = NotFound desc = could not find container \"61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9\": container with ID starting with 61b6d4a30dde475e0464d5a8564db0e208026583aaf4477434164b6531ee2ba9 not found: ID does not exist" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.809478 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/2.log" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.811041 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/1.log" Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.811163 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-7fljw" event={"ID":"af6b6616-1e4c-4618-890b-7eb334b8c339","Type":"ContainerStarted","Data":"7a3ca3a8e24f0ddf0f61d2bf159140f88cbd84fc74f1e461a617b43e0403a26e"} Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.815019 5081 generic.go:334] "Generic (PLEG): container finished" podID="16eeca1f-850d-4aaa-9d18-eeb5ccb155af" containerID="143c7cceb8f82ec62e1b2e51728a2d684ce8c2e39a3aa9887c142fa85fe515e2" exitCode=0 Oct 03 15:40:14 crc kubenswrapper[5081]: I1003 15:40:14.815079 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerDied","Data":"143c7cceb8f82ec62e1b2e51728a2d684ce8c2e39a3aa9887c142fa85fe515e2"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.840277 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"bb74fcdb03559c9dd5169e32b167a2ee75f9f9b8614aa0ce8006e4c75a973caf"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.841240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"bfac180b485e05129f339f1a0ba1a7e96523be6ab1b2ecb405bf48adbabc9a86"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.841262 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"87fc8cdd4e095b8d4119a797c9c74e5816e3ed2209a79c4622692d62d4ff3fc8"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.841282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"986d4508d404ddb2c1777a96d7a806d08b20f15f9cd89d134e225a9c899cec69"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.841302 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"7cd7a6fe62a8c7d41098e83d396ad93cad5a5780393898a0c95a5261cdbd21ad"} Oct 03 15:40:15 crc kubenswrapper[5081]: I1003 15:40:15.841318 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"7c2ad368243b3cd97f2ba5b50558d2bbd88ed082345e8f226532e2ecd4058834"} Oct 03 15:40:17 crc kubenswrapper[5081]: I1003 15:40:17.852476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"b8179daa15c0e1d21df26c2ff7e40fac334fbf82a5143e15cdcd487d727d758a"} Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.885070 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" event={"ID":"16eeca1f-850d-4aaa-9d18-eeb5ccb155af","Type":"ContainerStarted","Data":"b7edea909e3e190ab3ab3581a571d5bd96c013bfd8ab4627863f9a2a541415bc"} Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.886013 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.886043 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.886059 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.914809 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" podStartSLOduration=7.9147890279999995 podStartE2EDuration="7.914789028s" podCreationTimestamp="2025-10-03 15:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:40:20.914688825 +0000 UTC m=+739.880245438" watchObservedRunningTime="2025-10-03 15:40:20.914789028 +0000 UTC m=+739.880345641" Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.922666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:20 crc kubenswrapper[5081]: I1003 15:40:20.924659 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.238679 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-n8c8h"] Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.239815 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.242579 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.242626 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.243936 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.244113 5081 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-vg5g2" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.252995 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n8c8h"] Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.301048 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.301108 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhdlf\" (UniqueName: \"kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.301145 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.403036 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhdlf\" (UniqueName: \"kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.403095 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.403124 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.403463 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " 
pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.403988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.426456 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhdlf\" (UniqueName: \"kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf\") pod \"crc-storage-crc-n8c8h\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.559864 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.585150 5081 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(478f1d2fa5e7717de18b157581cc664afa4cf8cb506ec26c019ec4d8e1bf4c84): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.585261 5081 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(478f1d2fa5e7717de18b157581cc664afa4cf8cb506ec26c019ec4d8e1bf4c84): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.585306 5081 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(478f1d2fa5e7717de18b157581cc664afa4cf8cb506ec26c019ec4d8e1bf4c84): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.585375 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-n8c8h_crc-storage(2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-n8c8h_crc-storage(2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(478f1d2fa5e7717de18b157581cc664afa4cf8cb506ec26c019ec4d8e1bf4c84): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-n8c8h" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.890395 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: I1003 15:40:21.891020 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.922634 5081 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(8020e793b38ce4010e917cf3cfa58b07093285435b80f01f6464e95f888afbad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.922842 5081 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(8020e793b38ce4010e917cf3cfa58b07093285435b80f01f6464e95f888afbad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.922951 5081 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(8020e793b38ce4010e917cf3cfa58b07093285435b80f01f6464e95f888afbad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:21 crc kubenswrapper[5081]: E1003 15:40:21.923094 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-n8c8h_crc-storage(2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-n8c8h_crc-storage(2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-n8c8h_crc-storage_2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b_0(8020e793b38ce4010e917cf3cfa58b07093285435b80f01f6464e95f888afbad): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-n8c8h" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" Oct 03 15:40:30 crc kubenswrapper[5081]: I1003 15:40:30.647816 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:40:30 crc kubenswrapper[5081]: I1003 15:40:30.648532 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.098868 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"] Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.099676 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerName="controller-manager" containerID="cri-o://2eb004ffa330f9acf87394d441ccd0eab644c8ad37798418a3b389c83f3aba10" gracePeriod=30 Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.208408 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.208727 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" podUID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" containerName="route-controller-manager" containerID="cri-o://35e616c3f3866571889739d9a68ecf92e02aa15cfc063f58e194c7945ea84f40" gracePeriod=30 Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.977773 5081 generic.go:334] "Generic (PLEG): container finished" podID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerID="2eb004ffa330f9acf87394d441ccd0eab644c8ad37798418a3b389c83f3aba10" exitCode=0 Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.978233 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" event={"ID":"69e0e14b-c635-4027-b3ac-f89fd4d71f1b","Type":"ContainerDied","Data":"2eb004ffa330f9acf87394d441ccd0eab644c8ad37798418a3b389c83f3aba10"} Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.981169 5081 generic.go:334] "Generic (PLEG): container finished" podID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" containerID="35e616c3f3866571889739d9a68ecf92e02aa15cfc063f58e194c7945ea84f40" exitCode=0 Oct 03 15:40:33 crc kubenswrapper[5081]: I1003 15:40:33.981201 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" event={"ID":"c418f3b3-41e5-4185-84f7-22f8dd9c5431","Type":"ContainerDied","Data":"35e616c3f3866571889739d9a68ecf92e02aa15cfc063f58e194c7945ea84f40"} Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.072897 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.182023 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles\") pod \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.182097 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config\") pod \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.182143 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert\") pod \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.182309 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q84db\" (UniqueName: \"kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db\") pod \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.182954 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "69e0e14b-c635-4027-b3ac-f89fd4d71f1b" (UID: "69e0e14b-c635-4027-b3ac-f89fd4d71f1b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.183241 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca\") pod \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\" (UID: \"69e0e14b-c635-4027-b3ac-f89fd4d71f1b\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.183658 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca" (OuterVolumeSpecName: "client-ca") pod "69e0e14b-c635-4027-b3ac-f89fd4d71f1b" (UID: "69e0e14b-c635-4027-b3ac-f89fd4d71f1b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.183679 5081 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.183918 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config" (OuterVolumeSpecName: "config") pod "69e0e14b-c635-4027-b3ac-f89fd4d71f1b" (UID: "69e0e14b-c635-4027-b3ac-f89fd4d71f1b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.188896 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "69e0e14b-c635-4027-b3ac-f89fd4d71f1b" (UID: "69e0e14b-c635-4027-b3ac-f89fd4d71f1b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.188899 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db" (OuterVolumeSpecName: "kube-api-access-q84db") pod "69e0e14b-c635-4027-b3ac-f89fd4d71f1b" (UID: "69e0e14b-c635-4027-b3ac-f89fd4d71f1b"). InnerVolumeSpecName "kube-api-access-q84db". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.193097 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.284759 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5mw4\" (UniqueName: \"kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4\") pod \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.284843 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config\") pod \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.284976 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert\") pod \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.285033 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca\") pod \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\" (UID: \"c418f3b3-41e5-4185-84f7-22f8dd9c5431\") " Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.285305 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.285328 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.285338 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q84db\" (UniqueName: \"kubernetes.io/projected/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-kube-api-access-q84db\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.285349 5081 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/69e0e14b-c635-4027-b3ac-f89fd4d71f1b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.286599 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca" (OuterVolumeSpecName: "client-ca") pod "c418f3b3-41e5-4185-84f7-22f8dd9c5431" (UID: "c418f3b3-41e5-4185-84f7-22f8dd9c5431"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.286551 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config" (OuterVolumeSpecName: "config") pod "c418f3b3-41e5-4185-84f7-22f8dd9c5431" (UID: "c418f3b3-41e5-4185-84f7-22f8dd9c5431"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.289353 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c418f3b3-41e5-4185-84f7-22f8dd9c5431" (UID: "c418f3b3-41e5-4185-84f7-22f8dd9c5431"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.289489 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4" (OuterVolumeSpecName: "kube-api-access-b5mw4") pod "c418f3b3-41e5-4185-84f7-22f8dd9c5431" (UID: "c418f3b3-41e5-4185-84f7-22f8dd9c5431"). InnerVolumeSpecName "kube-api-access-b5mw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.386731 5081 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.386789 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5mw4\" (UniqueName: \"kubernetes.io/projected/c418f3b3-41e5-4185-84f7-22f8dd9c5431-kube-api-access-b5mw4\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.386814 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c418f3b3-41e5-4185-84f7-22f8dd9c5431-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.386830 5081 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c418f3b3-41e5-4185-84f7-22f8dd9c5431-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.826935 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.827581 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.993067 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" event={"ID":"c418f3b3-41e5-4185-84f7-22f8dd9c5431","Type":"ContainerDied","Data":"ed25ec0ce0aa23ccc2869c14a106c469e69e732c482d3779d36f00a912ca809a"} Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.993580 5081 scope.go:117] "RemoveContainer" containerID="35e616c3f3866571889739d9a68ecf92e02aa15cfc063f58e194c7945ea84f40" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.993756 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl" Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.998785 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" event={"ID":"69e0e14b-c635-4027-b3ac-f89fd4d71f1b","Type":"ContainerDied","Data":"2a5984f8bb8d5dee706b85552593e8a012bf69f5d42ae326f623bdab1d2d6eee"} Oct 03 15:40:34 crc kubenswrapper[5081]: I1003 15:40:34.998913 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cgxc7" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.021623 5081 scope.go:117] "RemoveContainer" containerID="2eb004ffa330f9acf87394d441ccd0eab644c8ad37798418a3b389c83f3aba10" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.042284 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.049746 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8bsnl"] Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.054373 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"] Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.064377 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cgxc7"] Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.184422 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-78897f64d6-d6fh2"] Oct 03 15:40:35 crc kubenswrapper[5081]: E1003 15:40:35.184806 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerName="controller-manager" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.184828 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerName="controller-manager" Oct 03 15:40:35 crc kubenswrapper[5081]: E1003 15:40:35.184846 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" containerName="route-controller-manager" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.184854 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" containerName="route-controller-manager" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.184986 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" containerName="route-controller-manager" 
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.185000 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" containerName="controller-manager"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.185655 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.187481 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"]
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.188494 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.189055 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.189432 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.195578 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.196041 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.197672 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198029 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198093 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198329 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198413 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198543 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198670 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.198733 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199192 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r2w6\" (UniqueName: \"kubernetes.io/projected/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-kube-api-access-2r2w6\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-config\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199427 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-proxy-ca-bundles\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199520 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-config\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199580 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-serving-cert\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199606 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-serving-cert\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199671 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-client-ca\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199705 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsnk2\" (UniqueName: \"kubernetes.io/projected/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-kube-api-access-jsnk2\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.199740 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-client-ca\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.203026 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"]
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.204462 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.205620 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78897f64d6-d6fh2"]
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.259975 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n8c8h"]
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.266288 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301317 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r2w6\" (UniqueName: \"kubernetes.io/projected/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-kube-api-access-2r2w6\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301381 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-config\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301424 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-proxy-ca-bundles\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301444 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-config\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301465 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-serving-cert\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"
Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.301484 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-serving-cert\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2"
Oct 03 15:40:35 crc
kubenswrapper[5081]: I1003 15:40:35.303032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-proxy-ca-bundles\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.303136 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-config\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.303152 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-config\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.303337 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-client-ca\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.303377 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsnk2\" (UniqueName: \"kubernetes.io/projected/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-kube-api-access-jsnk2\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.303446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-client-ca\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.304046 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-client-ca\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.304143 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-client-ca\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.309437 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-serving-cert\") 
pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.311975 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-serving-cert\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.325710 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsnk2\" (UniqueName: \"kubernetes.io/projected/d67d382b-1bca-4bb6-b92b-c77e8204a4c6-kube-api-access-jsnk2\") pod \"controller-manager-78897f64d6-d6fh2\" (UID: \"d67d382b-1bca-4bb6-b92b-c77e8204a4c6\") " pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.330362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r2w6\" (UniqueName: \"kubernetes.io/projected/7743375b-0b40-4fe4-ad5b-1af8aa3579e3-kube-api-access-2r2w6\") pod \"route-controller-manager-bb799c9c4-kzbf8\" (UID: \"7743375b-0b40-4fe4-ad5b-1af8aa3579e3\") " pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.518213 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.527342 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.776250 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8"] Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.836165 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69e0e14b-c635-4027-b3ac-f89fd4d71f1b" path="/var/lib/kubelet/pods/69e0e14b-c635-4027-b3ac-f89fd4d71f1b/volumes" Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.836753 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c418f3b3-41e5-4185-84f7-22f8dd9c5431" path="/var/lib/kubelet/pods/c418f3b3-41e5-4185-84f7-22f8dd9c5431/volumes" Oct 03 15:40:35 crc kubenswrapper[5081]: W1003 15:40:35.837269 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd67d382b_1bca_4bb6_b92b_c77e8204a4c6.slice/crio-aa2757c84f3f19de081e51f5a2f8340a15d0a75b53a6aa7420077af15bd56f20 WatchSource:0}: Error finding container aa2757c84f3f19de081e51f5a2f8340a15d0a75b53a6aa7420077af15bd56f20: Status 404 returned error can't find the container with id aa2757c84f3f19de081e51f5a2f8340a15d0a75b53a6aa7420077af15bd56f20 Oct 03 15:40:35 crc kubenswrapper[5081]: I1003 15:40:35.837306 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78897f64d6-d6fh2"] Oct 03 15:40:36 crc kubenswrapper[5081]: I1003 15:40:36.005942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n8c8h" event={"ID":"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b","Type":"ContainerStarted","Data":"1485b2d4c312743efc2faf2401b1e8c8e48abf437300b3bcb914c5bfc192fcad"} Oct 03 15:40:36 crc kubenswrapper[5081]: I1003 15:40:36.008535 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" event={"ID":"7743375b-0b40-4fe4-ad5b-1af8aa3579e3","Type":"ContainerStarted","Data":"2cf3a5bab1a07fd6d7ea71641dd1ca012e9098466d79a109fb397caa86121680"} Oct 03 15:40:36 crc kubenswrapper[5081]: I1003 15:40:36.010026 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" event={"ID":"d67d382b-1bca-4bb6-b92b-c77e8204a4c6","Type":"ContainerStarted","Data":"aa2757c84f3f19de081e51f5a2f8340a15d0a75b53a6aa7420077af15bd56f20"} Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.020103 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" event={"ID":"7743375b-0b40-4fe4-ad5b-1af8aa3579e3","Type":"ContainerStarted","Data":"c42bb96c8ee53c30aa474af9f0d98a861ec8013a9c21daa0c995980cbc7a604e"} Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.021221 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.022486 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" event={"ID":"d67d382b-1bca-4bb6-b92b-c77e8204a4c6","Type":"ContainerStarted","Data":"875d51f4db610a8cc3c32444bda7e19a815b95ce5f264cadca9997b3045f54da"} Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.024916 5081 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.029110 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.029543 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.046088 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bb799c9c4-kzbf8" podStartSLOduration=4.046053966 podStartE2EDuration="4.046053966s" podCreationTimestamp="2025-10-03 15:40:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:40:37.041853207 +0000 UTC m=+756.007409820" watchObservedRunningTime="2025-10-03 15:40:37.046053966 +0000 UTC m=+756.011610579" Oct 03 15:40:37 crc kubenswrapper[5081]: I1003 15:40:37.067732 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-78897f64d6-d6fh2" podStartSLOduration=4.067709345 podStartE2EDuration="4.067709345s" podCreationTimestamp="2025-10-03 15:40:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:40:37.063298909 +0000 UTC m=+756.028855532" watchObservedRunningTime="2025-10-03 15:40:37.067709345 +0000 UTC m=+756.033265988" Oct 03 15:40:40 crc kubenswrapper[5081]: I1003 15:40:40.044656 5081 generic.go:334] "Generic (PLEG): container finished" podID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" containerID="0e2eccbc2cfbc6e67f183377abba5cb02ae7e1b821f3aa1d4ca9daabe2879129" exitCode=0 Oct 03 15:40:40 crc kubenswrapper[5081]: I1003 15:40:40.044832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n8c8h" event={"ID":"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b","Type":"ContainerDied","Data":"0e2eccbc2cfbc6e67f183377abba5cb02ae7e1b821f3aa1d4ca9daabe2879129"} Oct 03 15:40:40 crc kubenswrapper[5081]: I1003 15:40:40.189510 5081 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.375943 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.398829 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt\") pod \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.398891 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhdlf\" (UniqueName: \"kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf\") pod \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.398952 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage\") pod \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\" (UID: \"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b\") " Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.398961 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" (UID: "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.399113 5081 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.411794 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf" (OuterVolumeSpecName: "kube-api-access-zhdlf") pod "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" (UID: "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b"). InnerVolumeSpecName "kube-api-access-zhdlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.427462 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" (UID: "2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.500259 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhdlf\" (UniqueName: \"kubernetes.io/projected/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-kube-api-access-zhdlf\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:41 crc kubenswrapper[5081]: I1003 15:40:41.500328 5081 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:42 crc kubenswrapper[5081]: I1003 15:40:42.061880 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n8c8h" event={"ID":"2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b","Type":"ContainerDied","Data":"1485b2d4c312743efc2faf2401b1e8c8e48abf437300b3bcb914c5bfc192fcad"} Oct 03 15:40:42 crc kubenswrapper[5081]: I1003 15:40:42.061925 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n8c8h" Oct 03 15:40:42 crc kubenswrapper[5081]: I1003 15:40:42.061932 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1485b2d4c312743efc2faf2401b1e8c8e48abf437300b3bcb914c5bfc192fcad" Oct 03 15:40:43 crc kubenswrapper[5081]: I1003 15:40:43.598726 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rq5c9" Oct 03 15:40:43 crc kubenswrapper[5081]: I1003 15:40:43.865960 5081 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pode63642c7-8d80-4615-94d9-91d4c41421cc"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pode63642c7-8d80-4615-94d9-91d4c41421cc] : Timed out while waiting for systemd to remove kubepods-burstable-pode63642c7_8d80_4615_94d9_91d4c41421cc.slice" Oct 03 15:40:43 crc kubenswrapper[5081]: E1003 15:40:43.866051 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable pode63642c7-8d80-4615-94d9-91d4c41421cc] : unable to destroy cgroup paths for cgroup [kubepods burstable pode63642c7-8d80-4615-94d9-91d4c41421cc] : Timed out while waiting for systemd to remove kubepods-burstable-pode63642c7_8d80_4615_94d9_91d4c41421cc.slice" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" Oct 03 15:40:44 crc kubenswrapper[5081]: I1003 15:40:44.072813 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5bxx6" Oct 03 15:40:44 crc kubenswrapper[5081]: I1003 15:40:44.095705 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5bxx6"] Oct 03 15:40:44 crc kubenswrapper[5081]: I1003 15:40:44.098206 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5bxx6"] Oct 03 15:40:45 crc kubenswrapper[5081]: I1003 15:40:45.835601 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e63642c7-8d80-4615-94d9-91d4c41421cc" path="/var/lib/kubelet/pods/e63642c7-8d80-4615-94d9-91d4c41421cc/volumes" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.919430 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq"] Oct 03 15:40:49 crc kubenswrapper[5081]: E1003 15:40:49.920103 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" containerName="storage" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.920117 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" containerName="storage" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.920218 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" containerName="storage" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.921082 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.923331 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 15:40:49 crc kubenswrapper[5081]: I1003 15:40:49.931431 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq"] Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.020625 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.020674 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.020749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhg4l\" (UniqueName: \"kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc 
kubenswrapper[5081]: I1003 15:40:50.121513 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.121578 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.121616 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhg4l\" (UniqueName: \"kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.122270 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.122476 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.143675 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhg4l\" (UniqueName: \"kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.239367 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:50 crc kubenswrapper[5081]: I1003 15:40:50.649136 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq"] Oct 03 15:40:51 crc kubenswrapper[5081]: I1003 15:40:51.114267 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerStarted","Data":"64bc315df4a7495bb4242620ebebe6e21a2b557898b69b939ee651ea3a94ee98"} Oct 03 15:40:51 crc kubenswrapper[5081]: I1003 15:40:51.114400 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerStarted","Data":"0611ab43996f9d889f1eb9e059bb101b33dfb8f7bdd32f87a02e935853065362"} Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.127700 5081 generic.go:334] "Generic (PLEG): container finished" podID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerID="64bc315df4a7495bb4242620ebebe6e21a2b557898b69b939ee651ea3a94ee98" exitCode=0 Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.128161 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerDied","Data":"64bc315df4a7495bb4242620ebebe6e21a2b557898b69b939ee651ea3a94ee98"} Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.276234 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.277897 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.298194 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.350200 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxvc9\" (UniqueName: \"kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.350254 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.350285 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.451909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxvc9\" (UniqueName: \"kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.452426 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.452460 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.452933 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.453042 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.478896 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fxvc9\" (UniqueName: \"kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9\") pod \"redhat-operators-brsv5\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:52 crc kubenswrapper[5081]: I1003 15:40:52.597157 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:40:53 crc kubenswrapper[5081]: I1003 15:40:53.040767 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:40:53 crc kubenswrapper[5081]: I1003 15:40:53.135283 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerStarted","Data":"5d0400e947ebceff5d179b8f9bfaa21e31bf9346f6f45664df55b459a90333d9"} Oct 03 15:40:54 crc kubenswrapper[5081]: I1003 15:40:54.148075 5081 generic.go:334] "Generic (PLEG): container finished" podID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerID="e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c" exitCode=0 Oct 03 15:40:54 crc kubenswrapper[5081]: I1003 15:40:54.148144 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerDied","Data":"e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c"} Oct 03 15:40:54 crc kubenswrapper[5081]: I1003 15:40:54.150940 5081 generic.go:334] "Generic (PLEG): container finished" podID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerID="3e522269434803c3a7f757e21f321d478237f7bda23c26c572acef6aca51623e" exitCode=0 Oct 03 15:40:54 crc kubenswrapper[5081]: I1003 15:40:54.150977 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerDied","Data":"3e522269434803c3a7f757e21f321d478237f7bda23c26c572acef6aca51623e"} Oct 03 15:40:55 crc kubenswrapper[5081]: I1003 15:40:55.161767 5081 generic.go:334] "Generic (PLEG): container finished" podID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerID="9471cf221fb6541d386494c48edba096375f7858316882ad663ec4433d110abb" exitCode=0 Oct 03 15:40:55 crc kubenswrapper[5081]: I1003 15:40:55.161896 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerDied","Data":"9471cf221fb6541d386494c48edba096375f7858316882ad663ec4433d110abb"} Oct 03 15:40:55 crc kubenswrapper[5081]: I1003 15:40:55.164382 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerStarted","Data":"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899"} Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.171101 5081 generic.go:334] "Generic (PLEG): container finished" podID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerID="76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899" exitCode=0 Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.171159 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" 
event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerDied","Data":"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899"} Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.493395 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.615090 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util\") pod \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.615168 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle\") pod \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.615200 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhg4l\" (UniqueName: \"kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l\") pod \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\" (UID: \"42f4375e-1b8e-4805-8031-a87e31f1d6c4\") " Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.616026 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle" (OuterVolumeSpecName: "bundle") pod "42f4375e-1b8e-4805-8031-a87e31f1d6c4" (UID: "42f4375e-1b8e-4805-8031-a87e31f1d6c4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.621698 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l" (OuterVolumeSpecName: "kube-api-access-jhg4l") pod "42f4375e-1b8e-4805-8031-a87e31f1d6c4" (UID: "42f4375e-1b8e-4805-8031-a87e31f1d6c4"). InnerVolumeSpecName "kube-api-access-jhg4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.716519 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhg4l\" (UniqueName: \"kubernetes.io/projected/42f4375e-1b8e-4805-8031-a87e31f1d6c4-kube-api-access-jhg4l\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.716579 5081 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.738016 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util" (OuterVolumeSpecName: "util") pod "42f4375e-1b8e-4805-8031-a87e31f1d6c4" (UID: "42f4375e-1b8e-4805-8031-a87e31f1d6c4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:40:56 crc kubenswrapper[5081]: I1003 15:40:56.817737 5081 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/42f4375e-1b8e-4805-8031-a87e31f1d6c4-util\") on node \"crc\" DevicePath \"\"" Oct 03 15:40:57 crc kubenswrapper[5081]: I1003 15:40:57.180510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" event={"ID":"42f4375e-1b8e-4805-8031-a87e31f1d6c4","Type":"ContainerDied","Data":"0611ab43996f9d889f1eb9e059bb101b33dfb8f7bdd32f87a02e935853065362"} Oct 03 15:40:57 crc kubenswrapper[5081]: I1003 15:40:57.181053 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0611ab43996f9d889f1eb9e059bb101b33dfb8f7bdd32f87a02e935853065362" Oct 03 15:40:57 crc kubenswrapper[5081]: I1003 15:40:57.180579 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq" Oct 03 15:40:57 crc kubenswrapper[5081]: I1003 15:40:57.182353 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerStarted","Data":"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849"} Oct 03 15:40:57 crc kubenswrapper[5081]: I1003 15:40:57.204360 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-brsv5" podStartSLOduration=2.431147429 podStartE2EDuration="5.204341467s" podCreationTimestamp="2025-10-03 15:40:52 +0000 UTC" firstStartedPulling="2025-10-03 15:40:54.151651698 +0000 UTC m=+773.117208311" lastFinishedPulling="2025-10-03 15:40:56.924845736 +0000 UTC m=+775.890402349" observedRunningTime="2025-10-03 15:40:57.203515133 +0000 UTC m=+776.169071776" watchObservedRunningTime="2025-10-03 15:40:57.204341467 +0000 UTC m=+776.169898080" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.376811 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-82vx7"] Oct 03 15:41:00 crc kubenswrapper[5081]: E1003 15:41:00.377099 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="util" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.377113 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="util" Oct 03 15:41:00 crc kubenswrapper[5081]: E1003 15:41:00.377128 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="pull" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.377134 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="pull" Oct 03 15:41:00 crc kubenswrapper[5081]: E1003 15:41:00.377144 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="extract" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.377150 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" containerName="extract" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.377264 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f4375e-1b8e-4805-8031-a87e31f1d6c4" 
containerName="extract" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.377899 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.381031 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.381622 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.381839 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-h4p6x" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.399053 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-82vx7"] Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.573193 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrx24\" (UniqueName: \"kubernetes.io/projected/1eb24e7f-b4c5-4fdc-8acf-2c34145dd409-kube-api-access-hrx24\") pod \"nmstate-operator-858ddd8f98-82vx7\" (UID: \"1eb24e7f-b4c5-4fdc-8acf-2c34145dd409\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.648112 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.648190 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.648248 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.648991 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.649050 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9" gracePeriod=600 Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.674949 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrx24\" (UniqueName: \"kubernetes.io/projected/1eb24e7f-b4c5-4fdc-8acf-2c34145dd409-kube-api-access-hrx24\") pod \"nmstate-operator-858ddd8f98-82vx7\" (UID: \"1eb24e7f-b4c5-4fdc-8acf-2c34145dd409\") " 
pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.699736 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrx24\" (UniqueName: \"kubernetes.io/projected/1eb24e7f-b4c5-4fdc-8acf-2c34145dd409-kube-api-access-hrx24\") pod \"nmstate-operator-858ddd8f98-82vx7\" (UID: \"1eb24e7f-b4c5-4fdc-8acf-2c34145dd409\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" Oct 03 15:41:00 crc kubenswrapper[5081]: I1003 15:41:00.997981 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" Oct 03 15:41:01 crc kubenswrapper[5081]: I1003 15:41:01.475975 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-82vx7"] Oct 03 15:41:02 crc kubenswrapper[5081]: I1003 15:41:02.214278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" event={"ID":"1eb24e7f-b4c5-4fdc-8acf-2c34145dd409","Type":"ContainerStarted","Data":"5ae97946cc476717cef95bf3edd7ae94db133df8e353e7a180e0c722e6986214"} Oct 03 15:41:02 crc kubenswrapper[5081]: I1003 15:41:02.226021 5081 scope.go:117] "RemoveContainer" containerID="b12d5282af4729b63b6a5584f865a271f0dea9c44b6b61540f7477c2ce67aa7e" Oct 03 15:41:02 crc kubenswrapper[5081]: I1003 15:41:02.597293 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:02 crc kubenswrapper[5081]: I1003 15:41:02.597402 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:02 crc kubenswrapper[5081]: I1003 15:41:02.637436 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:03 crc kubenswrapper[5081]: I1003 15:41:03.260375 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:04 crc kubenswrapper[5081]: I1003 15:41:04.232104 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-7fljw_af6b6616-1e4c-4618-890b-7eb334b8c339/kube-multus/2.log" Oct 03 15:41:04 crc kubenswrapper[5081]: I1003 15:41:04.234723 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9" exitCode=0 Oct 03 15:41:04 crc kubenswrapper[5081]: I1003 15:41:04.234802 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9"} Oct 03 15:41:04 crc kubenswrapper[5081]: I1003 15:41:04.234868 5081 scope.go:117] "RemoveContainer" containerID="63af043e610585f95cf2fe036c18efbb219a40ffaa2e79808b6ccfbd888824bd" Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.063533 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.242736 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5"} Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.245292 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" event={"ID":"1eb24e7f-b4c5-4fdc-8acf-2c34145dd409","Type":"ContainerStarted","Data":"6ac669b36ae9a23144fa55a572a3d8234098792b2ea551b7db4df24d7c8011f8"} Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.245401 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-brsv5" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="registry-server" containerID="cri-o://755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849" gracePeriod=2 Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.282292 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-82vx7" podStartSLOduration=1.7536720730000002 podStartE2EDuration="5.282264144s" podCreationTimestamp="2025-10-03 15:41:00 +0000 UTC" firstStartedPulling="2025-10-03 15:41:01.496851764 +0000 UTC m=+780.462408377" lastFinishedPulling="2025-10-03 15:41:05.025443835 +0000 UTC m=+783.991000448" observedRunningTime="2025-10-03 15:41:05.280542914 +0000 UTC m=+784.246099537" watchObservedRunningTime="2025-10-03 15:41:05.282264144 +0000 UTC m=+784.247820757" Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.762151 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.957938 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content\") pod \"24082b32-f00e-48cd-810a-a1fdcdeef512\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.958027 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities\") pod \"24082b32-f00e-48cd-810a-a1fdcdeef512\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.958051 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxvc9\" (UniqueName: \"kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9\") pod \"24082b32-f00e-48cd-810a-a1fdcdeef512\" (UID: \"24082b32-f00e-48cd-810a-a1fdcdeef512\") " Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.960463 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities" (OuterVolumeSpecName: "utilities") pod "24082b32-f00e-48cd-810a-a1fdcdeef512" (UID: "24082b32-f00e-48cd-810a-a1fdcdeef512"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:05 crc kubenswrapper[5081]: I1003 15:41:05.965873 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9" (OuterVolumeSpecName: "kube-api-access-fxvc9") pod "24082b32-f00e-48cd-810a-a1fdcdeef512" (UID: "24082b32-f00e-48cd-810a-a1fdcdeef512"). 
InnerVolumeSpecName "kube-api-access-fxvc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.035296 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24082b32-f00e-48cd-810a-a1fdcdeef512" (UID: "24082b32-f00e-48cd-810a-a1fdcdeef512"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.060608 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.060653 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxvc9\" (UniqueName: \"kubernetes.io/projected/24082b32-f00e-48cd-810a-a1fdcdeef512-kube-api-access-fxvc9\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.060667 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24082b32-f00e-48cd-810a-a1fdcdeef512-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.253477 5081 generic.go:334] "Generic (PLEG): container finished" podID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerID="755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849" exitCode=0 Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.254413 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-brsv5" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.254796 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerDied","Data":"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849"} Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.254841 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-brsv5" event={"ID":"24082b32-f00e-48cd-810a-a1fdcdeef512","Type":"ContainerDied","Data":"5d0400e947ebceff5d179b8f9bfaa21e31bf9346f6f45664df55b459a90333d9"} Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.254867 5081 scope.go:117] "RemoveContainer" containerID="755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.277890 5081 scope.go:117] "RemoveContainer" containerID="76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.284487 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.288717 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-brsv5"] Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.301660 5081 scope.go:117] "RemoveContainer" containerID="e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.316779 5081 scope.go:117] "RemoveContainer" containerID="755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849" Oct 03 15:41:06 crc kubenswrapper[5081]: E1003 
15:41:06.317208 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849\": container with ID starting with 755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849 not found: ID does not exist" containerID="755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.317264 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849"} err="failed to get container status \"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849\": rpc error: code = NotFound desc = could not find container \"755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849\": container with ID starting with 755e88420b5154a31b32b770f12db14680d49ffd28d0ddf0d519ac13bff5a849 not found: ID does not exist" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.317308 5081 scope.go:117] "RemoveContainer" containerID="76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899" Oct 03 15:41:06 crc kubenswrapper[5081]: E1003 15:41:06.318054 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899\": container with ID starting with 76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899 not found: ID does not exist" containerID="76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.318087 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899"} err="failed to get container status \"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899\": rpc error: code = NotFound desc = could not find container \"76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899\": container with ID starting with 76878813e61cb10d80bc96074645ffbc504c47ac0f69ac8240a21ece2983e899 not found: ID does not exist" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.318105 5081 scope.go:117] "RemoveContainer" containerID="e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c" Oct 03 15:41:06 crc kubenswrapper[5081]: E1003 15:41:06.319347 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c\": container with ID starting with e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c not found: ID does not exist" containerID="e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c" Oct 03 15:41:06 crc kubenswrapper[5081]: I1003 15:41:06.319409 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c"} err="failed to get container status \"e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c\": rpc error: code = NotFound desc = could not find container \"e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c\": container with ID starting with e46b317744c39833cd7f739990512d13ea780eec18b02b04125d9fa28c57ee0c not found: ID does not exist" Oct 03 15:41:07 crc kubenswrapper[5081]: I1003 15:41:07.833927 5081 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" path="/var/lib/kubelet/pods/24082b32-f00e-48cd-810a-a1fdcdeef512/volumes" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.097089 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb"] Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.097875 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="registry-server" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.097893 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="registry-server" Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.097907 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="extract-content" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.097916 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="extract-content" Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.097926 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="extract-utilities" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.097934 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="extract-utilities" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.098080 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="24082b32-f00e-48cd-810a-a1fdcdeef512" containerName="registry-server" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.098863 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.101088 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-r6sgr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.112337 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.114307 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n6z4\" (UniqueName: \"kubernetes.io/projected/cef12029-69af-4423-9f86-535950d9a8c8-kube-api-access-2n6z4\") pod \"nmstate-metrics-fdff9cb8d-f74bb\" (UID: \"cef12029-69af-4423-9f86-535950d9a8c8\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.117376 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.118551 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.122882 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.147800 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.156725 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-r4whk"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.157522 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215449 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n6z4\" (UniqueName: \"kubernetes.io/projected/cef12029-69af-4423-9f86-535950d9a8c8-kube-api-access-2n6z4\") pod \"nmstate-metrics-fdff9cb8d-f74bb\" (UID: \"cef12029-69af-4423-9f86-535950d9a8c8\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215498 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-nmstate-lock\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8q7r\" (UniqueName: \"kubernetes.io/projected/e5bd637d-02f0-46a0-af03-14f8a9235b60-kube-api-access-n8q7r\") pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215572 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdqxt\" (UniqueName: \"kubernetes.io/projected/b09fac9a-1cba-40e0-ad40-341209ef1014-kube-api-access-hdqxt\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215594 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-ovs-socket\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215611 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.215633 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-dbus-socket\") pod 
\"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.258507 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n6z4\" (UniqueName: \"kubernetes.io/projected/cef12029-69af-4423-9f86-535950d9a8c8-kube-api-access-2n6z4\") pod \"nmstate-metrics-fdff9cb8d-f74bb\" (UID: \"cef12029-69af-4423-9f86-535950d9a8c8\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.277984 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.280290 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.282166 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-prpgp" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.285129 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.285802 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.298481 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316452 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-dbus-socket\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316697 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b5bp\" (UniqueName: \"kubernetes.io/projected/36d4396d-83ce-4642-b81f-77f773a8fe3f-kube-api-access-7b5bp\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316831 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-nmstate-lock\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316867 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8q7r\" (UniqueName: \"kubernetes.io/projected/e5bd637d-02f0-46a0-af03-14f8a9235b60-kube-api-access-n8q7r\") pod 
\"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316882 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-nmstate-lock\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316899 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdqxt\" (UniqueName: \"kubernetes.io/projected/b09fac9a-1cba-40e0-ad40-341209ef1014-kube-api-access-hdqxt\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316935 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-ovs-socket\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316959 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316979 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-dbus-socket\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.316987 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/36d4396d-83ce-4642-b81f-77f773a8fe3f-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.317088 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b09fac9a-1cba-40e0-ad40-341209ef1014-ovs-socket\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.317148 5081 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.317196 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair podName:e5bd637d-02f0-46a0-af03-14f8a9235b60 nodeName:}" failed. No retries permitted until 2025-10-03 15:41:10.817177621 +0000 UTC m=+789.782734234 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair") pod "nmstate-webhook-6cdbc54649-6lvtr" (UID: "e5bd637d-02f0-46a0-af03-14f8a9235b60") : secret "openshift-nmstate-webhook" not found Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.334670 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdqxt\" (UniqueName: \"kubernetes.io/projected/b09fac9a-1cba-40e0-ad40-341209ef1014-kube-api-access-hdqxt\") pod \"nmstate-handler-r4whk\" (UID: \"b09fac9a-1cba-40e0-ad40-341209ef1014\") " pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.338537 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8q7r\" (UniqueName: \"kubernetes.io/projected/e5bd637d-02f0-46a0-af03-14f8a9235b60-kube-api-access-n8q7r\") pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.418032 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/36d4396d-83ce-4642-b81f-77f773a8fe3f-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.418400 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.418446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b5bp\" (UniqueName: \"kubernetes.io/projected/36d4396d-83ce-4642-b81f-77f773a8fe3f-kube-api-access-7b5bp\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.418649 5081 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 03 15:41:10 crc kubenswrapper[5081]: E1003 15:41:10.418761 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert podName:36d4396d-83ce-4642-b81f-77f773a8fe3f nodeName:}" failed. No retries permitted until 2025-10-03 15:41:10.918730007 +0000 UTC m=+789.884286620 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-fxssj" (UID: "36d4396d-83ce-4642-b81f-77f773a8fe3f") : secret "plugin-serving-cert" not found Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.419362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/36d4396d-83ce-4642-b81f-77f773a8fe3f-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.420594 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.437278 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b5bp\" (UniqueName: \"kubernetes.io/projected/36d4396d-83ce-4642-b81f-77f773a8fe3f-kube-api-access-7b5bp\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.484190 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.516020 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-58847c794-ht7kb"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.517018 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.626003 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.626048 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.626066 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qkrr\" (UniqueName: \"kubernetes.io/projected/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-kube-api-access-5qkrr\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.632649 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58847c794-ht7kb"] Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.635949 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-trusted-ca-bundle\") pod 
\"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.635984 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-oauth-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.636030 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-oauth-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.636083 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-service-ca\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737685 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-trusted-ca-bundle\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737740 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-oauth-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737771 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-oauth-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737801 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-service-ca\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737868 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737890 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.737912 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qkrr\" (UniqueName: \"kubernetes.io/projected/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-kube-api-access-5qkrr\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.738848 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-service-ca\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.739031 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-trusted-ca-bundle\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.741671 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.743378 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-oauth-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.745455 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-serving-cert\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.746328 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-console-oauth-config\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.759198 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qkrr\" (UniqueName: \"kubernetes.io/projected/b1c5dae8-59d6-4875-a3b4-f5f821e6d17f-kube-api-access-5qkrr\") pod \"console-58847c794-ht7kb\" (UID: \"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f\") " pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.839333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair\") 
pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.843234 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e5bd637d-02f0-46a0-af03-14f8a9235b60-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-6lvtr\" (UID: \"e5bd637d-02f0-46a0-af03-14f8a9235b60\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.844281 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.940627 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.944528 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/36d4396d-83ce-4642-b81f-77f773a8fe3f-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-fxssj\" (UID: \"36d4396d-83ce-4642-b81f-77f773a8fe3f\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:10 crc kubenswrapper[5081]: I1003 15:41:10.977795 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb"] Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.038816 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.205367 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.290606 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58847c794-ht7kb"] Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.299648 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-r4whk" event={"ID":"b09fac9a-1cba-40e0-ad40-341209ef1014","Type":"ContainerStarted","Data":"6bcdc6b3a7408067940e0ba8f347d72f45d0284d1618046f4ef098df69fce0fb"} Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.300737 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" event={"ID":"cef12029-69af-4423-9f86-535950d9a8c8","Type":"ContainerStarted","Data":"89efd55e8c6523baa9a8ccf97fd45d2ced75212c4292ac6667874bfba70dd6ce"} Oct 03 15:41:11 crc kubenswrapper[5081]: W1003 15:41:11.302307 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1c5dae8_59d6_4875_a3b4_f5f821e6d17f.slice/crio-06fc67b3e5d65f285b835c6e1a7ddc0f1ede35ee3c0d2f2f320e262e6fe60213 WatchSource:0}: Error finding container 06fc67b3e5d65f285b835c6e1a7ddc0f1ede35ee3c0d2f2f320e262e6fe60213: Status 404 returned error can't find the container with id 06fc67b3e5d65f285b835c6e1a7ddc0f1ede35ee3c0d2f2f320e262e6fe60213 Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.448663 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr"] Oct 03 15:41:11 crc kubenswrapper[5081]: W1003 15:41:11.458796 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5bd637d_02f0_46a0_af03_14f8a9235b60.slice/crio-cc0d36470599e39e004b919584e451f722bb12aebd49586d5f7cf80cb014859b WatchSource:0}: Error finding container cc0d36470599e39e004b919584e451f722bb12aebd49586d5f7cf80cb014859b: Status 404 returned error can't find the container with id cc0d36470599e39e004b919584e451f722bb12aebd49586d5f7cf80cb014859b Oct 03 15:41:11 crc kubenswrapper[5081]: I1003 15:41:11.592796 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj"] Oct 03 15:41:11 crc kubenswrapper[5081]: W1003 15:41:11.600287 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36d4396d_83ce_4642_b81f_77f773a8fe3f.slice/crio-43088aa7ad13384f7637d5dd1bb12d4b305d3d4cb0bb23cf51f886743e21c128 WatchSource:0}: Error finding container 43088aa7ad13384f7637d5dd1bb12d4b305d3d4cb0bb23cf51f886743e21c128: Status 404 returned error can't find the container with id 43088aa7ad13384f7637d5dd1bb12d4b305d3d4cb0bb23cf51f886743e21c128 Oct 03 15:41:12 crc kubenswrapper[5081]: I1003 15:41:12.307744 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58847c794-ht7kb" event={"ID":"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f","Type":"ContainerStarted","Data":"dbcf670f1c7b15bdf005d8013166b33cb81bd80ed3d0c39e0ca3545bb81c78bb"} Oct 03 15:41:12 crc kubenswrapper[5081]: I1003 15:41:12.307799 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58847c794-ht7kb" event={"ID":"b1c5dae8-59d6-4875-a3b4-f5f821e6d17f","Type":"ContainerStarted","Data":"06fc67b3e5d65f285b835c6e1a7ddc0f1ede35ee3c0d2f2f320e262e6fe60213"} Oct 03 15:41:12 crc kubenswrapper[5081]: 
I1003 15:41:12.311511 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" event={"ID":"e5bd637d-02f0-46a0-af03-14f8a9235b60","Type":"ContainerStarted","Data":"cc0d36470599e39e004b919584e451f722bb12aebd49586d5f7cf80cb014859b"} Oct 03 15:41:12 crc kubenswrapper[5081]: I1003 15:41:12.313022 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" event={"ID":"36d4396d-83ce-4642-b81f-77f773a8fe3f","Type":"ContainerStarted","Data":"43088aa7ad13384f7637d5dd1bb12d4b305d3d4cb0bb23cf51f886743e21c128"} Oct 03 15:41:12 crc kubenswrapper[5081]: I1003 15:41:12.333971 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-58847c794-ht7kb" podStartSLOduration=2.333942651 podStartE2EDuration="2.333942651s" podCreationTimestamp="2025-10-03 15:41:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:41:12.332814598 +0000 UTC m=+791.298371361" watchObservedRunningTime="2025-10-03 15:41:12.333942651 +0000 UTC m=+791.299499284" Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.355912 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" event={"ID":"cef12029-69af-4423-9f86-535950d9a8c8","Type":"ContainerStarted","Data":"dd69313f5fcdc5b9ef0966d48066e381eb509360d319ad9b03624a0059032fb2"} Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.358262 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-r4whk" event={"ID":"b09fac9a-1cba-40e0-ad40-341209ef1014","Type":"ContainerStarted","Data":"d5bdeed810c8ab6387d8fb3f92bed6aa4dc1e50da6d9e831388f6aaef10db139"} Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.358499 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.360315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" event={"ID":"e5bd637d-02f0-46a0-af03-14f8a9235b60","Type":"ContainerStarted","Data":"9b44ccb21290214cb906f6d9ffca834011bc161428613e85c7feba465e085b7c"} Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.360534 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.379064 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-r4whk" podStartSLOduration=1.922967271 podStartE2EDuration="9.379042728s" podCreationTimestamp="2025-10-03 15:41:10 +0000 UTC" firstStartedPulling="2025-10-03 15:41:10.538515228 +0000 UTC m=+789.504071841" lastFinishedPulling="2025-10-03 15:41:17.994590684 +0000 UTC m=+796.960147298" observedRunningTime="2025-10-03 15:41:19.376443603 +0000 UTC m=+798.342000256" watchObservedRunningTime="2025-10-03 15:41:19.379042728 +0000 UTC m=+798.344599351" Oct 03 15:41:19 crc kubenswrapper[5081]: I1003 15:41:19.398319 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" podStartSLOduration=2.859591053 podStartE2EDuration="9.398290082s" podCreationTimestamp="2025-10-03 15:41:10 +0000 UTC" firstStartedPulling="2025-10-03 15:41:11.460836868 +0000 UTC m=+790.426393481" 
lastFinishedPulling="2025-10-03 15:41:17.999535857 +0000 UTC m=+796.965092510" observedRunningTime="2025-10-03 15:41:19.39405564 +0000 UTC m=+798.359612263" watchObservedRunningTime="2025-10-03 15:41:19.398290082 +0000 UTC m=+798.363846705" Oct 03 15:41:20 crc kubenswrapper[5081]: I1003 15:41:20.844991 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:20 crc kubenswrapper[5081]: I1003 15:41:20.845686 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:20 crc kubenswrapper[5081]: I1003 15:41:20.852866 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:21 crc kubenswrapper[5081]: I1003 15:41:21.378922 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-58847c794-ht7kb" Oct 03 15:41:21 crc kubenswrapper[5081]: I1003 15:41:21.489784 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.513509 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-r4whk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.676771 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.679303 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.696000 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.717328 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.717478 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpgbh\" (UniqueName: \"kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.717778 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.820423 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: 
I1003 15:41:25.820563 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.820792 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpgbh\" (UniqueName: \"kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.821534 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.821658 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:25 crc kubenswrapper[5081]: I1003 15:41:25.854870 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpgbh\" (UniqueName: \"kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh\") pod \"community-operators-shhpk\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:26 crc kubenswrapper[5081]: I1003 15:41:26.016856 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:27 crc kubenswrapper[5081]: I1003 15:41:27.136460 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:27 crc kubenswrapper[5081]: W1003 15:41:27.143011 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod595e397e_53d8_42aa_8fd6_f679fe525ba8.slice/crio-0346e2f536b269b87ef33e3d58a5e000993639e4ad786ba0117c273572e357d5 WatchSource:0}: Error finding container 0346e2f536b269b87ef33e3d58a5e000993639e4ad786ba0117c273572e357d5: Status 404 returned error can't find the container with id 0346e2f536b269b87ef33e3d58a5e000993639e4ad786ba0117c273572e357d5 Oct 03 15:41:27 crc kubenswrapper[5081]: I1003 15:41:27.419863 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" event={"ID":"36d4396d-83ce-4642-b81f-77f773a8fe3f","Type":"ContainerStarted","Data":"31dbf17152bb574c96069a4a3f37fcb1248200fc3da0bd0b274b716562b1d41f"} Oct 03 15:41:27 crc kubenswrapper[5081]: I1003 15:41:27.421968 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerStarted","Data":"0346e2f536b269b87ef33e3d58a5e000993639e4ad786ba0117c273572e357d5"} Oct 03 15:41:28 crc kubenswrapper[5081]: I1003 15:41:28.431820 5081 generic.go:334] "Generic (PLEG): container finished" podID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerID="a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c" exitCode=0 Oct 03 15:41:28 crc kubenswrapper[5081]: I1003 15:41:28.431896 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerDied","Data":"a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c"} Oct 03 15:41:28 crc kubenswrapper[5081]: I1003 15:41:28.461377 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-fxssj" podStartSLOduration=3.317889397 podStartE2EDuration="18.461347185s" podCreationTimestamp="2025-10-03 15:41:10 +0000 UTC" firstStartedPulling="2025-10-03 15:41:11.603154238 +0000 UTC m=+790.568710851" lastFinishedPulling="2025-10-03 15:41:26.746612026 +0000 UTC m=+805.712168639" observedRunningTime="2025-10-03 15:41:28.459058189 +0000 UTC m=+807.424614882" watchObservedRunningTime="2025-10-03 15:41:28.461347185 +0000 UTC m=+807.426903818" Oct 03 15:41:31 crc kubenswrapper[5081]: I1003 15:41:31.046716 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-6lvtr" Oct 03 15:41:33 crc kubenswrapper[5081]: I1003 15:41:33.471265 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" event={"ID":"cef12029-69af-4423-9f86-535950d9a8c8","Type":"ContainerStarted","Data":"86e3f710d729934ab51d3efdabbfde0bc4cfa69ca8d2092be212e917d482e3e2"} Oct 03 15:41:33 crc kubenswrapper[5081]: I1003 15:41:33.475357 5081 generic.go:334] "Generic (PLEG): container finished" podID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerID="9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14" exitCode=0 Oct 03 15:41:33 crc kubenswrapper[5081]: I1003 15:41:33.475446 5081 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerDied","Data":"9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14"} Oct 03 15:41:33 crc kubenswrapper[5081]: I1003 15:41:33.497602 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-f74bb" podStartSLOduration=1.9134205469999999 podStartE2EDuration="23.49753749s" podCreationTimestamp="2025-10-03 15:41:10 +0000 UTC" firstStartedPulling="2025-10-03 15:41:10.987901764 +0000 UTC m=+789.953458377" lastFinishedPulling="2025-10-03 15:41:32.572018687 +0000 UTC m=+811.537575320" observedRunningTime="2025-10-03 15:41:33.492990019 +0000 UTC m=+812.458546722" watchObservedRunningTime="2025-10-03 15:41:33.49753749 +0000 UTC m=+812.463094103" Oct 03 15:41:35 crc kubenswrapper[5081]: I1003 15:41:35.490408 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerStarted","Data":"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1"} Oct 03 15:41:35 crc kubenswrapper[5081]: I1003 15:41:35.512733 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-shhpk" podStartSLOduration=4.979597114 podStartE2EDuration="10.512711674s" podCreationTimestamp="2025-10-03 15:41:25 +0000 UTC" firstStartedPulling="2025-10-03 15:41:28.745884982 +0000 UTC m=+807.711441615" lastFinishedPulling="2025-10-03 15:41:34.278999562 +0000 UTC m=+813.244556175" observedRunningTime="2025-10-03 15:41:35.51222428 +0000 UTC m=+814.477780903" watchObservedRunningTime="2025-10-03 15:41:35.512711674 +0000 UTC m=+814.478268297" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.017826 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.018345 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.338182 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.339654 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.347362 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.509912 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.510800 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.511077 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztvdf\" (UniqueName: \"kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.612393 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztvdf\" (UniqueName: \"kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.612477 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.612502 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.613169 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.613822 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.638841 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ztvdf\" (UniqueName: \"kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf\") pod \"redhat-marketplace-8g4x2\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:36 crc kubenswrapper[5081]: I1003 15:41:36.656545 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:37 crc kubenswrapper[5081]: I1003 15:41:37.106075 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-shhpk" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="registry-server" probeResult="failure" output=< Oct 03 15:41:37 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s Oct 03 15:41:37 crc kubenswrapper[5081]: > Oct 03 15:41:37 crc kubenswrapper[5081]: I1003 15:41:37.138936 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:37 crc kubenswrapper[5081]: I1003 15:41:37.507368 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerStarted","Data":"e658db7e8ef91c374c137c0299742a6edcb40b6ead470467dd2476cec2a28bd7"} Oct 03 15:41:37 crc kubenswrapper[5081]: I1003 15:41:37.508860 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerStarted","Data":"6e0ddad16b380aa5e2d1e3f28ae97dabb920e634e64e49de2970e949dfd1b830"} Oct 03 15:41:38 crc kubenswrapper[5081]: I1003 15:41:38.516828 5081 generic.go:334] "Generic (PLEG): container finished" podID="f1c533ac-8395-4e69-b11b-406221295a3b" containerID="e658db7e8ef91c374c137c0299742a6edcb40b6ead470467dd2476cec2a28bd7" exitCode=0 Oct 03 15:41:38 crc kubenswrapper[5081]: I1003 15:41:38.516930 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerDied","Data":"e658db7e8ef91c374c137c0299742a6edcb40b6ead470467dd2476cec2a28bd7"} Oct 03 15:41:39 crc kubenswrapper[5081]: I1003 15:41:39.527606 5081 generic.go:334] "Generic (PLEG): container finished" podID="f1c533ac-8395-4e69-b11b-406221295a3b" containerID="4a8bce6b92fd618fd9044f54fee341b51a19082ddfc8cf724475deb62413436b" exitCode=0 Oct 03 15:41:39 crc kubenswrapper[5081]: I1003 15:41:39.527698 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerDied","Data":"4a8bce6b92fd618fd9044f54fee341b51a19082ddfc8cf724475deb62413436b"} Oct 03 15:41:41 crc kubenswrapper[5081]: I1003 15:41:41.546130 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerStarted","Data":"08ac70bb6f4f09bf71b76919ab435a25a5dca989a51851caaed2491f81d19626"} Oct 03 15:41:41 crc kubenswrapper[5081]: I1003 15:41:41.566545 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8g4x2" podStartSLOduration=2.643779374 podStartE2EDuration="5.566522793s" podCreationTimestamp="2025-10-03 15:41:36 +0000 UTC" firstStartedPulling="2025-10-03 15:41:37.508977202 +0000 
UTC m=+816.474533815" lastFinishedPulling="2025-10-03 15:41:40.431720621 +0000 UTC m=+819.397277234" observedRunningTime="2025-10-03 15:41:41.56570125 +0000 UTC m=+820.531257883" watchObservedRunningTime="2025-10-03 15:41:41.566522793 +0000 UTC m=+820.532079406" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.289011 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t"] Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.291154 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.293265 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.306945 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t"] Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.492095 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.492404 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.492506 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw62p\" (UniqueName: \"kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.593376 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.593433 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw62p\" (UniqueName: \"kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.593481 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.594174 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.594180 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.613145 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw62p\" (UniqueName: \"kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:45 crc kubenswrapper[5081]: I1003 15:41:45.615841 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.024082 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t"] Oct 03 15:41:46 crc kubenswrapper[5081]: W1003 15:41:46.030003 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52892b90_1940_4bfa_8460_c3dce0452734.slice/crio-83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e WatchSource:0}: Error finding container 83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e: Status 404 returned error can't find the container with id 83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.074653 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.144646 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.533380 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-hr5zj" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" containerID="cri-o://92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6" gracePeriod=15 Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.580199 5081 generic.go:334] "Generic (PLEG): container finished" podID="52892b90-1940-4bfa-8460-c3dce0452734" containerID="1609c74ce3b6e0f5af4a30a60c2d6d10be46f1d1de75ec8f55c7b0eec3920908" exitCode=0 Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.580257 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" event={"ID":"52892b90-1940-4bfa-8460-c3dce0452734","Type":"ContainerDied","Data":"1609c74ce3b6e0f5af4a30a60c2d6d10be46f1d1de75ec8f55c7b0eec3920908"} Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.580311 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" event={"ID":"52892b90-1940-4bfa-8460-c3dce0452734","Type":"ContainerStarted","Data":"83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e"} Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.657311 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.657385 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.699522 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.933536 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hr5zj_87f6080d-917b-4d20-a744-9fb3bad43a77/console/0.log" Oct 03 15:41:46 crc kubenswrapper[5081]: I1003 15:41:46.933634 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116340 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116830 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116883 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116919 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116955 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhz5p\" (UniqueName: \"kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.116984 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.117010 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert\") pod \"87f6080d-917b-4d20-a744-9fb3bad43a77\" (UID: \"87f6080d-917b-4d20-a744-9fb3bad43a77\") " Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.117926 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config" (OuterVolumeSpecName: "console-config") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.117960 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.118211 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca" (OuterVolumeSpecName: "service-ca") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.118340 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.123191 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.123509 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.125154 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p" (OuterVolumeSpecName: "kube-api-access-jhz5p") pod "87f6080d-917b-4d20-a744-9fb3bad43a77" (UID: "87f6080d-917b-4d20-a744-9fb3bad43a77"). InnerVolumeSpecName "kube-api-access-jhz5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218427 5081 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218473 5081 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218484 5081 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87f6080d-917b-4d20-a744-9fb3bad43a77-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218493 5081 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218503 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhz5p\" (UniqueName: \"kubernetes.io/projected/87f6080d-917b-4d20-a744-9fb3bad43a77-kube-api-access-jhz5p\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218515 5081 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.218523 5081 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87f6080d-917b-4d20-a744-9fb3bad43a77-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.587398 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hr5zj_87f6080d-917b-4d20-a744-9fb3bad43a77/console/0.log" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.587456 5081 generic.go:334] "Generic (PLEG): container finished" podID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerID="92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6" exitCode=2 Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.588351 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hr5zj" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.588485 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hr5zj" event={"ID":"87f6080d-917b-4d20-a744-9fb3bad43a77","Type":"ContainerDied","Data":"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6"} Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.588532 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hr5zj" event={"ID":"87f6080d-917b-4d20-a744-9fb3bad43a77","Type":"ContainerDied","Data":"7e0b72764571169d6ef421b91c63fb9280939d74aa35344e8923e38e16041f0f"} Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.588553 5081 scope.go:117] "RemoveContainer" containerID="92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.620394 5081 scope.go:117] "RemoveContainer" containerID="92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6" Oct 03 15:41:47 crc kubenswrapper[5081]: E1003 15:41:47.625079 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6\": container with ID starting with 92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6 not found: ID does not exist" containerID="92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.625162 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6"} err="failed to get container status \"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6\": rpc error: code = NotFound desc = could not find container \"92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6\": container with ID starting with 92217d79b29373fe650da6ff7a7d84823957ea0e93363e9219bf06a9567875a6 not found: ID does not exist" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.630995 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.637290 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-hr5zj"] Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.639193 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:47 crc kubenswrapper[5081]: I1003 15:41:47.837276 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" path="/var/lib/kubelet/pods/87f6080d-917b-4d20-a744-9fb3bad43a77/volumes" Oct 03 15:41:49 crc kubenswrapper[5081]: I1003 15:41:49.646988 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:49 crc kubenswrapper[5081]: I1003 15:41:49.647715 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-shhpk" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="registry-server" containerID="cri-o://832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1" gracePeriod=2 Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.231836 5081 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.363803 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpgbh\" (UniqueName: \"kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh\") pod \"595e397e-53d8-42aa-8fd6-f679fe525ba8\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.364264 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities\") pod \"595e397e-53d8-42aa-8fd6-f679fe525ba8\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.364309 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content\") pod \"595e397e-53d8-42aa-8fd6-f679fe525ba8\" (UID: \"595e397e-53d8-42aa-8fd6-f679fe525ba8\") " Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.365043 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities" (OuterVolumeSpecName: "utilities") pod "595e397e-53d8-42aa-8fd6-f679fe525ba8" (UID: "595e397e-53d8-42aa-8fd6-f679fe525ba8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.371819 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh" (OuterVolumeSpecName: "kube-api-access-kpgbh") pod "595e397e-53d8-42aa-8fd6-f679fe525ba8" (UID: "595e397e-53d8-42aa-8fd6-f679fe525ba8"). InnerVolumeSpecName "kube-api-access-kpgbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.413643 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "595e397e-53d8-42aa-8fd6-f679fe525ba8" (UID: "595e397e-53d8-42aa-8fd6-f679fe525ba8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.466099 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.466142 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/595e397e-53d8-42aa-8fd6-f679fe525ba8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.466154 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpgbh\" (UniqueName: \"kubernetes.io/projected/595e397e-53d8-42aa-8fd6-f679fe525ba8-kube-api-access-kpgbh\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.607877 5081 generic.go:334] "Generic (PLEG): container finished" podID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerID="832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1" exitCode=0 Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.607939 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shhpk" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.607973 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerDied","Data":"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1"} Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.608024 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shhpk" event={"ID":"595e397e-53d8-42aa-8fd6-f679fe525ba8","Type":"ContainerDied","Data":"0346e2f536b269b87ef33e3d58a5e000993639e4ad786ba0117c273572e357d5"} Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.608046 5081 scope.go:117] "RemoveContainer" containerID="832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.610742 5081 generic.go:334] "Generic (PLEG): container finished" podID="52892b90-1940-4bfa-8460-c3dce0452734" containerID="f3ae7f54983f4bf02a33456a5906128b77c63c3b3e9388937507c0c9342c01ba" exitCode=0 Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.610796 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" event={"ID":"52892b90-1940-4bfa-8460-c3dce0452734","Type":"ContainerDied","Data":"f3ae7f54983f4bf02a33456a5906128b77c63c3b3e9388937507c0c9342c01ba"} Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.629830 5081 scope.go:117] "RemoveContainer" containerID="9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.643970 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.647666 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-shhpk"] Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.669405 5081 scope.go:117] "RemoveContainer" containerID="a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.695845 5081 scope.go:117] 
"RemoveContainer" containerID="832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1" Oct 03 15:41:50 crc kubenswrapper[5081]: E1003 15:41:50.696360 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1\": container with ID starting with 832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1 not found: ID does not exist" containerID="832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.696394 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1"} err="failed to get container status \"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1\": rpc error: code = NotFound desc = could not find container \"832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1\": container with ID starting with 832add9ed21986ca6aee7bbb44c0ea5fd202e7b06091993ed02895f6a426e8c1 not found: ID does not exist" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.696437 5081 scope.go:117] "RemoveContainer" containerID="9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14" Oct 03 15:41:50 crc kubenswrapper[5081]: E1003 15:41:50.696696 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14\": container with ID starting with 9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14 not found: ID does not exist" containerID="9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.696719 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14"} err="failed to get container status \"9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14\": rpc error: code = NotFound desc = could not find container \"9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14\": container with ID starting with 9a584501c6d13794db6a1f12e54a478dbc2350484fe9d49565312f6b5bdc5f14 not found: ID does not exist" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.696731 5081 scope.go:117] "RemoveContainer" containerID="a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c" Oct 03 15:41:50 crc kubenswrapper[5081]: E1003 15:41:50.697011 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c\": container with ID starting with a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c not found: ID does not exist" containerID="a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c" Oct 03 15:41:50 crc kubenswrapper[5081]: I1003 15:41:50.697047 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c"} err="failed to get container status \"a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c\": rpc error: code = NotFound desc = could not find container \"a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c\": container with ID starting with 
a260a1767ec72d05e4feedd0bd3db589bbe8fcd7d2ab04c3369dd9350309d57c not found: ID does not exist" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.248814 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.249095 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8g4x2" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="registry-server" containerID="cri-o://08ac70bb6f4f09bf71b76919ab435a25a5dca989a51851caaed2491f81d19626" gracePeriod=2 Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.634593 5081 generic.go:334] "Generic (PLEG): container finished" podID="f1c533ac-8395-4e69-b11b-406221295a3b" containerID="08ac70bb6f4f09bf71b76919ab435a25a5dca989a51851caaed2491f81d19626" exitCode=0 Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.634669 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerDied","Data":"08ac70bb6f4f09bf71b76919ab435a25a5dca989a51851caaed2491f81d19626"} Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.635322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8g4x2" event={"ID":"f1c533ac-8395-4e69-b11b-406221295a3b","Type":"ContainerDied","Data":"6e0ddad16b380aa5e2d1e3f28ae97dabb920e634e64e49de2970e949dfd1b830"} Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.635386 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e0ddad16b380aa5e2d1e3f28ae97dabb920e634e64e49de2970e949dfd1b830" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.636417 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.639153 5081 generic.go:334] "Generic (PLEG): container finished" podID="52892b90-1940-4bfa-8460-c3dce0452734" containerID="4f3b1c08dd388c3455dd2e319d5341b3fbc540603dbccccc64fe5b1134de46e9" exitCode=0 Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.639197 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" event={"ID":"52892b90-1940-4bfa-8460-c3dce0452734","Type":"ContainerDied","Data":"4f3b1c08dd388c3455dd2e319d5341b3fbc540603dbccccc64fe5b1134de46e9"} Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.690152 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content\") pod \"f1c533ac-8395-4e69-b11b-406221295a3b\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.690334 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztvdf\" (UniqueName: \"kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf\") pod \"f1c533ac-8395-4e69-b11b-406221295a3b\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.690391 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities\") pod \"f1c533ac-8395-4e69-b11b-406221295a3b\" (UID: \"f1c533ac-8395-4e69-b11b-406221295a3b\") " Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.693132 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities" (OuterVolumeSpecName: "utilities") pod "f1c533ac-8395-4e69-b11b-406221295a3b" (UID: "f1c533ac-8395-4e69-b11b-406221295a3b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.695863 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.697333 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf" (OuterVolumeSpecName: "kube-api-access-ztvdf") pod "f1c533ac-8395-4e69-b11b-406221295a3b" (UID: "f1c533ac-8395-4e69-b11b-406221295a3b"). InnerVolumeSpecName "kube-api-access-ztvdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.704941 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1c533ac-8395-4e69-b11b-406221295a3b" (UID: "f1c533ac-8395-4e69-b11b-406221295a3b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.797454 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztvdf\" (UniqueName: \"kubernetes.io/projected/f1c533ac-8395-4e69-b11b-406221295a3b-kube-api-access-ztvdf\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.797493 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1c533ac-8395-4e69-b11b-406221295a3b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:51 crc kubenswrapper[5081]: I1003 15:41:51.838199 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" path="/var/lib/kubelet/pods/595e397e-53d8-42aa-8fd6-f679fe525ba8/volumes" Oct 03 15:41:52 crc kubenswrapper[5081]: I1003 15:41:52.646375 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8g4x2" Oct 03 15:41:52 crc kubenswrapper[5081]: I1003 15:41:52.665939 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:52 crc kubenswrapper[5081]: I1003 15:41:52.672399 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8g4x2"] Oct 03 15:41:52 crc kubenswrapper[5081]: I1003 15:41:52.932190 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.014822 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util\") pod \"52892b90-1940-4bfa-8460-c3dce0452734\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.014903 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle\") pod \"52892b90-1940-4bfa-8460-c3dce0452734\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.015014 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw62p\" (UniqueName: \"kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p\") pod \"52892b90-1940-4bfa-8460-c3dce0452734\" (UID: \"52892b90-1940-4bfa-8460-c3dce0452734\") " Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.016737 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle" (OuterVolumeSpecName: "bundle") pod "52892b90-1940-4bfa-8460-c3dce0452734" (UID: "52892b90-1940-4bfa-8460-c3dce0452734"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.018742 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p" (OuterVolumeSpecName: "kube-api-access-xw62p") pod "52892b90-1940-4bfa-8460-c3dce0452734" (UID: "52892b90-1940-4bfa-8460-c3dce0452734"). InnerVolumeSpecName "kube-api-access-xw62p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.036550 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util" (OuterVolumeSpecName: "util") pod "52892b90-1940-4bfa-8460-c3dce0452734" (UID: "52892b90-1940-4bfa-8460-c3dce0452734"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.116552 5081 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-util\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.116610 5081 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/52892b90-1940-4bfa-8460-c3dce0452734-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.116619 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw62p\" (UniqueName: \"kubernetes.io/projected/52892b90-1940-4bfa-8460-c3dce0452734-kube-api-access-xw62p\") on node \"crc\" DevicePath \"\"" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.658115 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" event={"ID":"52892b90-1940-4bfa-8460-c3dce0452734","Type":"ContainerDied","Data":"83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e"} Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.658181 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83006f68d77f540855a575f84a1842a33a660ddba1d01e36db78ad24f1981b9e" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.658287 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t" Oct 03 15:41:53 crc kubenswrapper[5081]: I1003 15:41:53.837483 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" path="/var/lib/kubelet/pods/f1c533ac-8395-4e69-b11b-406221295a3b/volumes" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.685501 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5"] Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687266 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="extract-utilities" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687293 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="extract-utilities" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687334 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687349 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687376 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="util" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687389 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="util" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687407 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="extract-content" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687417 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="extract-content" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687437 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687444 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687459 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="extract-utilities" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687466 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="extract-utilities" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687481 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687487 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687498 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="extract" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 
15:42:00.687507 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="extract" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687518 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="pull" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687525 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="pull" Oct 03 15:42:00 crc kubenswrapper[5081]: E1003 15:42:00.687542 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="extract-content" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687549 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="extract-content" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687776 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="595e397e-53d8-42aa-8fd6-f679fe525ba8" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687794 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="87f6080d-917b-4d20-a744-9fb3bad43a77" containerName="console" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687805 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1c533ac-8395-4e69-b11b-406221295a3b" containerName="registry-server" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.687816 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="52892b90-1940-4bfa-8460-c3dce0452734" containerName="extract" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.688476 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.693332 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.693802 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.694145 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.694422 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-hwx8t" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.695200 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.701658 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5"] Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.729152 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-apiservice-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.729250 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6ntm\" (UniqueName: \"kubernetes.io/projected/b0a63649-dc26-4b38-ba41-7b93349c2385-kube-api-access-z6ntm\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.729345 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-webhook-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.830318 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-apiservice-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.830383 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6ntm\" (UniqueName: \"kubernetes.io/projected/b0a63649-dc26-4b38-ba41-7b93349c2385-kube-api-access-z6ntm\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.830417 
5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-webhook-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.841694 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-webhook-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.856352 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0a63649-dc26-4b38-ba41-7b93349c2385-apiservice-cert\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:00 crc kubenswrapper[5081]: I1003 15:42:00.874719 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6ntm\" (UniqueName: \"kubernetes.io/projected/b0a63649-dc26-4b38-ba41-7b93349c2385-kube-api-access-z6ntm\") pod \"metallb-operator-controller-manager-696555d8b4-p86s5\" (UID: \"b0a63649-dc26-4b38-ba41-7b93349c2385\") " pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.027432 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t"] Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.028419 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.030336 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-2s7vt" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.031714 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.032264 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.051314 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t"] Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.053055 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.134862 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-apiservice-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.135379 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ccbf\" (UniqueName: \"kubernetes.io/projected/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-kube-api-access-5ccbf\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.135430 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-webhook-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.237095 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-apiservice-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.237153 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ccbf\" (UniqueName: \"kubernetes.io/projected/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-kube-api-access-5ccbf\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.237190 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-webhook-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.242680 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-webhook-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.242699 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-apiservice-cert\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " 
pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.257393 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ccbf\" (UniqueName: \"kubernetes.io/projected/ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e-kube-api-access-5ccbf\") pod \"metallb-operator-webhook-server-656b449cbd-zcx5t\" (UID: \"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e\") " pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.345825 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.517275 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5"] Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.712887 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" event={"ID":"b0a63649-dc26-4b38-ba41-7b93349c2385","Type":"ContainerStarted","Data":"cb0b1878dc25d914b0c7b5ef11f45dd0829c0d137b4be750ffffb634b2295049"} Oct 03 15:42:01 crc kubenswrapper[5081]: I1003 15:42:01.792337 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t"] Oct 03 15:42:01 crc kubenswrapper[5081]: W1003 15:42:01.795058 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee37d6b3_b01e_4dcf_9c81_ba25b7860a7e.slice/crio-763ec9ae229269b944bced01da432f1557f8346896404bf1b8d773cddd16a0d6 WatchSource:0}: Error finding container 763ec9ae229269b944bced01da432f1557f8346896404bf1b8d773cddd16a0d6: Status 404 returned error can't find the container with id 763ec9ae229269b944bced01da432f1557f8346896404bf1b8d773cddd16a0d6 Oct 03 15:42:02 crc kubenswrapper[5081]: I1003 15:42:02.718246 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" event={"ID":"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e","Type":"ContainerStarted","Data":"763ec9ae229269b944bced01da432f1557f8346896404bf1b8d773cddd16a0d6"} Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.769352 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" event={"ID":"ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e","Type":"ContainerStarted","Data":"5affb18c0e2e2b5d271410dc8ab32d81e51a2464da57cd3778c6d3ed7d138b95"} Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.770957 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.776891 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" event={"ID":"b0a63649-dc26-4b38-ba41-7b93349c2385","Type":"ContainerStarted","Data":"546b36fabb8dda5e3f613cffaca1897c3481004109f6a62f9db5d9c96e81116a"} Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.777718 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.810105 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" podStartSLOduration=1.362492923 podStartE2EDuration="6.81007777s" podCreationTimestamp="2025-10-03 15:42:01 +0000 UTC" firstStartedPulling="2025-10-03 15:42:01.798182316 +0000 UTC m=+840.763738939" lastFinishedPulling="2025-10-03 15:42:07.245767173 +0000 UTC m=+846.211323786" observedRunningTime="2025-10-03 15:42:07.804911901 +0000 UTC m=+846.770468524" watchObservedRunningTime="2025-10-03 15:42:07.81007777 +0000 UTC m=+846.775634383" Oct 03 15:42:07 crc kubenswrapper[5081]: I1003 15:42:07.831107 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" podStartSLOduration=3.868935111 podStartE2EDuration="7.830463367s" podCreationTimestamp="2025-10-03 15:42:00 +0000 UTC" firstStartedPulling="2025-10-03 15:42:01.535407395 +0000 UTC m=+840.500964018" lastFinishedPulling="2025-10-03 15:42:05.496935651 +0000 UTC m=+844.462492274" observedRunningTime="2025-10-03 15:42:07.828752598 +0000 UTC m=+846.794309231" watchObservedRunningTime="2025-10-03 15:42:07.830463367 +0000 UTC m=+846.796019990" Oct 03 15:42:21 crc kubenswrapper[5081]: I1003 15:42:21.359761 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-656b449cbd-zcx5t" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.057181 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-696555d8b4-p86s5" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.974657 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-gn4v6"] Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.976881 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.979553 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.979732 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-5244x"] Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.979752 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.979955 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-97w8j" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.980529 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.982374 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 03 15:42:41 crc kubenswrapper[5081]: I1003 15:42:41.999396 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-5244x"] Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055412 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/08b3556e-4f32-4c35-9074-711ed072ea2d-cert\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055492 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-startup\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055548 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-sockets\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055591 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-reloader\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-conf\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055661 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6qgp\" (UniqueName: \"kubernetes.io/projected/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-kube-api-access-x6qgp\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055695 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qs7\" (UniqueName: \"kubernetes.io/projected/08b3556e-4f32-4c35-9074-711ed072ea2d-kube-api-access-q9qs7\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.055723 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.065633 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-5jcvg"] Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.066684 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.071184 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.071251 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.071346 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5ckl6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.071376 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.109574 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-99p8w"] Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.110572 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.115387 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-99p8w"] Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.116486 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156553 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx5ms\" (UniqueName: \"kubernetes.io/projected/1d713435-3b4a-4464-8400-55b74becb2d2-kube-api-access-nx5ms\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156614 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-metrics-certs\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156642 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156660 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: 
\"kubernetes.io/configmap/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-startup\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156681 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-sockets\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156697 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-reloader\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156717 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-conf\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156737 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b0782857-7149-4abb-bf32-d50899f01453-metallb-excludel2\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156757 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156789 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6qgp\" (UniqueName: \"kubernetes.io/projected/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-kube-api-access-x6qgp\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156813 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156833 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9qs7\" (UniqueName: \"kubernetes.io/projected/08b3556e-4f32-4c35-9074-711ed072ea2d-kube-api-access-q9qs7\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156852 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 
03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156875 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/08b3556e-4f32-4c35-9074-711ed072ea2d-cert\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156899 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmsfc\" (UniqueName: \"kubernetes.io/projected/b0782857-7149-4abb-bf32-d50899f01453-kube-api-access-rmsfc\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.156917 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-cert\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.157795 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-conf\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.157901 5081 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.157953 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs podName:a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40 nodeName:}" failed. No retries permitted until 2025-10-03 15:42:42.657935665 +0000 UTC m=+881.623492278 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs") pod "frr-k8s-gn4v6" (UID: "a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40") : secret "frr-k8s-certs-secret" not found Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.158069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-sockets\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.158319 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-frr-startup\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.158394 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.159262 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-reloader\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.168238 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/08b3556e-4f32-4c35-9074-711ed072ea2d-cert\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.173404 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9qs7\" (UniqueName: \"kubernetes.io/projected/08b3556e-4f32-4c35-9074-711ed072ea2d-kube-api-access-q9qs7\") pod \"frr-k8s-webhook-server-64bf5d555-5244x\" (UID: \"08b3556e-4f32-4c35-9074-711ed072ea2d\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.180991 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6qgp\" (UniqueName: \"kubernetes.io/projected/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-kube-api-access-x6qgp\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.257760 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.257998 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmsfc\" (UniqueName: \"kubernetes.io/projected/b0782857-7149-4abb-bf32-d50899f01453-kube-api-access-rmsfc\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 
15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.258090 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-cert\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.258174 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx5ms\" (UniqueName: \"kubernetes.io/projected/1d713435-3b4a-4464-8400-55b74becb2d2-kube-api-access-nx5ms\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.258257 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-metrics-certs\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.258023 5081 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.258420 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b0782857-7149-4abb-bf32-d50899f01453-metallb-excludel2\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.258496 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist podName:b0782857-7149-4abb-bf32-d50899f01453 nodeName:}" failed. No retries permitted until 2025-10-03 15:42:42.758469021 +0000 UTC m=+881.724025634 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist") pod "speaker-5jcvg" (UID: "b0782857-7149-4abb-bf32-d50899f01453") : secret "metallb-memberlist" not found Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.258574 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.258690 5081 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.258806 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs podName:b0782857-7149-4abb-bf32-d50899f01453 nodeName:}" failed. No retries permitted until 2025-10-03 15:42:42.75876063 +0000 UTC m=+881.724317243 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs") pod "speaker-5jcvg" (UID: "b0782857-7149-4abb-bf32-d50899f01453") : secret "speaker-certs-secret" not found Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.259958 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b0782857-7149-4abb-bf32-d50899f01453-metallb-excludel2\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.260203 5081 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.265281 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-metrics-certs\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.272082 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1d713435-3b4a-4464-8400-55b74becb2d2-cert\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.280916 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx5ms\" (UniqueName: \"kubernetes.io/projected/1d713435-3b4a-4464-8400-55b74becb2d2-kube-api-access-nx5ms\") pod \"controller-68d546b9d8-99p8w\" (UID: \"1d713435-3b4a-4464-8400-55b74becb2d2\") " pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.284142 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmsfc\" (UniqueName: \"kubernetes.io/projected/b0782857-7149-4abb-bf32-d50899f01453-kube-api-access-rmsfc\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.311742 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.423380 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.668486 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.682502 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40-metrics-certs\") pod \"frr-k8s-gn4v6\" (UID: \"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40\") " pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.723867 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-99p8w"] Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.769538 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.769706 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.769710 5081 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 15:42:42 crc kubenswrapper[5081]: E1003 15:42:42.769800 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist podName:b0782857-7149-4abb-bf32-d50899f01453 nodeName:}" failed. No retries permitted until 2025-10-03 15:42:43.769773421 +0000 UTC m=+882.735330034 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist") pod "speaker-5jcvg" (UID: "b0782857-7149-4abb-bf32-d50899f01453") : secret "metallb-memberlist" not found Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.773473 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-metrics-certs\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.786340 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-5244x"] Oct 03 15:42:42 crc kubenswrapper[5081]: W1003 15:42:42.794392 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08b3556e_4f32_4c35_9074_711ed072ea2d.slice/crio-d122c66fc9492b775a949b1bd91f9605d1cca67b95f425e84e2dbd7cce82c3ef WatchSource:0}: Error finding container d122c66fc9492b775a949b1bd91f9605d1cca67b95f425e84e2dbd7cce82c3ef: Status 404 returned error can't find the container with id d122c66fc9492b775a949b1bd91f9605d1cca67b95f425e84e2dbd7cce82c3ef Oct 03 15:42:42 crc kubenswrapper[5081]: I1003 15:42:42.904688 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.033484 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" event={"ID":"08b3556e-4f32-4c35-9074-711ed072ea2d","Type":"ContainerStarted","Data":"d122c66fc9492b775a949b1bd91f9605d1cca67b95f425e84e2dbd7cce82c3ef"} Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.035136 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-99p8w" event={"ID":"1d713435-3b4a-4464-8400-55b74becb2d2","Type":"ContainerStarted","Data":"7398546acb188d61b90e7a92cc52f9763b6b3021b32819f3832196b6a2aab30f"} Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.035188 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-99p8w" event={"ID":"1d713435-3b4a-4464-8400-55b74becb2d2","Type":"ContainerStarted","Data":"1dbe693a3f20cc762b1fbc845188e34495f616d1f0b40faeea7fd5664b7bb633"} Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.035199 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-99p8w" event={"ID":"1d713435-3b4a-4464-8400-55b74becb2d2","Type":"ContainerStarted","Data":"8b47a95c9f5c0e1661b5c98309bea5d0fe2b10df12b94c59ce71f725fca60ff6"} Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.035306 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.057422 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-99p8w" podStartSLOduration=1.057395187 podStartE2EDuration="1.057395187s" podCreationTimestamp="2025-10-03 15:42:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:42:43.05576791 +0000 UTC m=+882.021324523" watchObservedRunningTime="2025-10-03 15:42:43.057395187 +0000 UTC m=+882.022951800" Oct 03 15:42:43 crc 
kubenswrapper[5081]: I1003 15:42:43.791039 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.796359 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b0782857-7149-4abb-bf32-d50899f01453-memberlist\") pod \"speaker-5jcvg\" (UID: \"b0782857-7149-4abb-bf32-d50899f01453\") " pod="metallb-system/speaker-5jcvg" Oct 03 15:42:43 crc kubenswrapper[5081]: I1003 15:42:43.884255 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-5jcvg" Oct 03 15:42:43 crc kubenswrapper[5081]: W1003 15:42:43.922751 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0782857_7149_4abb_bf32_d50899f01453.slice/crio-fff6f1014fd5b4eeb0258ab9239d66b7456177139312da0aff859dc42d0e8269 WatchSource:0}: Error finding container fff6f1014fd5b4eeb0258ab9239d66b7456177139312da0aff859dc42d0e8269: Status 404 returned error can't find the container with id fff6f1014fd5b4eeb0258ab9239d66b7456177139312da0aff859dc42d0e8269 Oct 03 15:42:44 crc kubenswrapper[5081]: I1003 15:42:44.043997 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"c76b30982252952a2837ae9cbe7a8ea19826ac399de7f3155627a53bb7643f9b"} Oct 03 15:42:44 crc kubenswrapper[5081]: I1003 15:42:44.045061 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5jcvg" event={"ID":"b0782857-7149-4abb-bf32-d50899f01453","Type":"ContainerStarted","Data":"fff6f1014fd5b4eeb0258ab9239d66b7456177139312da0aff859dc42d0e8269"} Oct 03 15:42:45 crc kubenswrapper[5081]: I1003 15:42:45.068656 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5jcvg" event={"ID":"b0782857-7149-4abb-bf32-d50899f01453","Type":"ContainerStarted","Data":"1e8210afe615ea25ae46909377317e264732a7dcb200a866a7b5103e42a136e3"} Oct 03 15:42:45 crc kubenswrapper[5081]: I1003 15:42:45.069134 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5jcvg" event={"ID":"b0782857-7149-4abb-bf32-d50899f01453","Type":"ContainerStarted","Data":"f2114d7bb0201e81d5a65c85a35d251ca2dcc02e01ce964cee4d5ef163ac9647"} Oct 03 15:42:45 crc kubenswrapper[5081]: I1003 15:42:45.069196 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-5jcvg" Oct 03 15:42:45 crc kubenswrapper[5081]: I1003 15:42:45.097649 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-5jcvg" podStartSLOduration=3.097626913 podStartE2EDuration="3.097626913s" podCreationTimestamp="2025-10-03 15:42:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:42:45.092628199 +0000 UTC m=+884.058184812" watchObservedRunningTime="2025-10-03 15:42:45.097626913 +0000 UTC m=+884.063183526" Oct 03 15:42:51 crc kubenswrapper[5081]: I1003 15:42:51.112734 5081 generic.go:334] "Generic (PLEG): container finished" podID="a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40" 
containerID="cc26336d5d066dc21a3ff075286a92efee1fdbca7fdb10c0627cc20a2bd21143" exitCode=0 Oct 03 15:42:51 crc kubenswrapper[5081]: I1003 15:42:51.112858 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerDied","Data":"cc26336d5d066dc21a3ff075286a92efee1fdbca7fdb10c0627cc20a2bd21143"} Oct 03 15:42:51 crc kubenswrapper[5081]: I1003 15:42:51.116532 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" event={"ID":"08b3556e-4f32-4c35-9074-711ed072ea2d","Type":"ContainerStarted","Data":"5dfd9e880fd44dd089e7bf34f52c26c3a17bd4a7fc95ffd953eeec6e41dcf76e"} Oct 03 15:42:51 crc kubenswrapper[5081]: I1003 15:42:51.116739 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:42:52 crc kubenswrapper[5081]: I1003 15:42:52.124124 5081 generic.go:334] "Generic (PLEG): container finished" podID="a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40" containerID="c47bca7b28d3470c39be20b982d558dcf13da54002a0944dd0aadc6522e0de38" exitCode=0 Oct 03 15:42:52 crc kubenswrapper[5081]: I1003 15:42:52.124220 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerDied","Data":"c47bca7b28d3470c39be20b982d558dcf13da54002a0944dd0aadc6522e0de38"} Oct 03 15:42:52 crc kubenswrapper[5081]: I1003 15:42:52.148018 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" podStartSLOduration=3.645847857 podStartE2EDuration="11.147986883s" podCreationTimestamp="2025-10-03 15:42:41 +0000 UTC" firstStartedPulling="2025-10-03 15:42:42.797810689 +0000 UTC m=+881.763367302" lastFinishedPulling="2025-10-03 15:42:50.299949715 +0000 UTC m=+889.265506328" observedRunningTime="2025-10-03 15:42:51.155945365 +0000 UTC m=+890.121501988" watchObservedRunningTime="2025-10-03 15:42:52.147986883 +0000 UTC m=+891.113543496" Oct 03 15:42:52 crc kubenswrapper[5081]: I1003 15:42:52.429838 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-99p8w" Oct 03 15:42:53 crc kubenswrapper[5081]: I1003 15:42:53.132002 5081 generic.go:334] "Generic (PLEG): container finished" podID="a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40" containerID="ee4bd1be9810c6b9c8245aa793f6eac1dd4d26549cd1755bee9306fb8fb6cb5e" exitCode=0 Oct 03 15:42:53 crc kubenswrapper[5081]: I1003 15:42:53.132054 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerDied","Data":"ee4bd1be9810c6b9c8245aa793f6eac1dd4d26549cd1755bee9306fb8fb6cb5e"} Oct 03 15:42:54 crc kubenswrapper[5081]: I1003 15:42:54.143232 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"4ce10ca1bcc4cb58053d6459b7e4ec8805883085437f4a8ceb5942036ce52657"} Oct 03 15:42:54 crc kubenswrapper[5081]: I1003 15:42:54.143675 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"31e04fd731a2d950a6c7c73e8acc5cddc83aa20cb4e8a3bf8738928b6643085d"} Oct 03 15:42:54 crc kubenswrapper[5081]: I1003 15:42:54.143692 5081 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"0a4908c34a79afe0e6a7945b300e47f8495b27f079e041a4c77182b89bd149a4"} Oct 03 15:42:54 crc kubenswrapper[5081]: I1003 15:42:54.143707 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"847dbc9a5f234371438c1f013c3a985e6374039808020850a5555ecfa647e46c"} Oct 03 15:42:54 crc kubenswrapper[5081]: I1003 15:42:54.143717 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"84cbe62dacc12170e8f320b01d9bdc351899278ec38f077e13fdcef7c2627e29"} Oct 03 15:42:55 crc kubenswrapper[5081]: I1003 15:42:55.155314 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gn4v6" event={"ID":"a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40","Type":"ContainerStarted","Data":"fd67885ee4df690e2797490602590cc99f0798772040324d9d7d12daa109e85d"} Oct 03 15:42:55 crc kubenswrapper[5081]: I1003 15:42:55.155835 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:55 crc kubenswrapper[5081]: I1003 15:42:55.202845 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-gn4v6" podStartSLOduration=6.92102311 podStartE2EDuration="14.202822888s" podCreationTimestamp="2025-10-03 15:42:41 +0000 UTC" firstStartedPulling="2025-10-03 15:42:43.040105759 +0000 UTC m=+882.005662372" lastFinishedPulling="2025-10-03 15:42:50.321905527 +0000 UTC m=+889.287462150" observedRunningTime="2025-10-03 15:42:55.202358015 +0000 UTC m=+894.167914638" watchObservedRunningTime="2025-10-03 15:42:55.202822888 +0000 UTC m=+894.168379501" Oct 03 15:42:57 crc kubenswrapper[5081]: I1003 15:42:57.905603 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:42:57 crc kubenswrapper[5081]: I1003 15:42:57.946096 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:43:02 crc kubenswrapper[5081]: I1003 15:43:02.319254 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-5244x" Oct 03 15:43:03 crc kubenswrapper[5081]: I1003 15:43:03.890051 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-5jcvg" Oct 03 15:43:05 crc kubenswrapper[5081]: I1003 15:43:05.993666 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh"] Oct 03 15:43:05 crc kubenswrapper[5081]: I1003 15:43:05.995685 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:05 crc kubenswrapper[5081]: I1003 15:43:05.998798 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.011518 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh"] Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.107489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p7v2\" (UniqueName: \"kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.107697 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.107924 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.209187 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.209263 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.209373 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p7v2\" (UniqueName: \"kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.209963 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.210022 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.230899 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p7v2\" (UniqueName: \"kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.328320 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:06 crc kubenswrapper[5081]: I1003 15:43:06.595026 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh"] Oct 03 15:43:06 crc kubenswrapper[5081]: W1003 15:43:06.604598 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30b6aba5_eb9f_4f4e_aac3_cfeb3966bdc9.slice/crio-be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2 WatchSource:0}: Error finding container be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2: Status 404 returned error can't find the container with id be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2 Oct 03 15:43:07 crc kubenswrapper[5081]: I1003 15:43:07.262871 5081 generic.go:334] "Generic (PLEG): container finished" podID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerID="c7cd92f96e8f890a9969bce58746c430500ab6677a39a63ed9a09129c78b7b36" exitCode=0 Oct 03 15:43:07 crc kubenswrapper[5081]: I1003 15:43:07.262950 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" event={"ID":"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9","Type":"ContainerDied","Data":"c7cd92f96e8f890a9969bce58746c430500ab6677a39a63ed9a09129c78b7b36"} Oct 03 15:43:07 crc kubenswrapper[5081]: I1003 15:43:07.263335 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" event={"ID":"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9","Type":"ContainerStarted","Data":"be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2"} Oct 03 15:43:11 crc kubenswrapper[5081]: I1003 15:43:11.291225 5081 generic.go:334] "Generic (PLEG): container finished" podID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerID="aa1ce5142f6789bb76aa2e8112b13c4bf90b8b7a02e4f521929421b034d466ea" exitCode=0 Oct 03 15:43:11 crc kubenswrapper[5081]: I1003 15:43:11.291323 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" event={"ID":"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9","Type":"ContainerDied","Data":"aa1ce5142f6789bb76aa2e8112b13c4bf90b8b7a02e4f521929421b034d466ea"} Oct 03 15:43:12 crc kubenswrapper[5081]: I1003 15:43:12.300577 5081 generic.go:334] "Generic (PLEG): container finished" podID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerID="9292fb1b5dd6caffe56d352439a89d0bb7885528ec4f7e86ca00339f28a4b4e2" exitCode=0 Oct 03 15:43:12 crc kubenswrapper[5081]: I1003 15:43:12.300637 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" event={"ID":"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9","Type":"ContainerDied","Data":"9292fb1b5dd6caffe56d352439a89d0bb7885528ec4f7e86ca00339f28a4b4e2"} Oct 03 15:43:12 crc kubenswrapper[5081]: I1003 15:43:12.910416 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-gn4v6" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.582723 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.731244 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p7v2\" (UniqueName: \"kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2\") pod \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.731442 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle\") pod \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.731658 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util\") pod \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\" (UID: \"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9\") " Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.733239 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle" (OuterVolumeSpecName: "bundle") pod "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" (UID: "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.739243 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2" (OuterVolumeSpecName: "kube-api-access-7p7v2") pod "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" (UID: "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9"). InnerVolumeSpecName "kube-api-access-7p7v2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.741966 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util" (OuterVolumeSpecName: "util") pod "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" (UID: "30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.836414 5081 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-util\") on node \"crc\" DevicePath \"\"" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.836461 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p7v2\" (UniqueName: \"kubernetes.io/projected/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-kube-api-access-7p7v2\") on node \"crc\" DevicePath \"\"" Oct 03 15:43:13 crc kubenswrapper[5081]: I1003 15:43:13.836475 5081 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:43:14 crc kubenswrapper[5081]: I1003 15:43:14.316177 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" event={"ID":"30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9","Type":"ContainerDied","Data":"be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2"} Oct 03 15:43:14 crc kubenswrapper[5081]: I1003 15:43:14.316231 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be48724c018436d4c49b71dfce5fcaecdd58d3625b762013af77546f865768e2" Oct 03 15:43:14 crc kubenswrapper[5081]: I1003 15:43:14.316295 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.194712 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz"] Oct 03 15:43:20 crc kubenswrapper[5081]: E1003 15:43:20.195475 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="pull" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.195489 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="pull" Oct 03 15:43:20 crc kubenswrapper[5081]: E1003 15:43:20.195503 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="extract" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.195510 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="extract" Oct 03 15:43:20 crc kubenswrapper[5081]: E1003 15:43:20.195525 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="util" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.195530 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="util" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.195658 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9" containerName="extract" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.196131 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.201746 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.205140 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.207484 5081 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-8fhfw" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.223449 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz"] Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.328889 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdd4d\" (UniqueName: \"kubernetes.io/projected/ac9680e7-7c4a-40b7-b720-34a1b4b728e0-kube-api-access-wdd4d\") pod \"cert-manager-operator-controller-manager-57cd46d6d-vntlz\" (UID: \"ac9680e7-7c4a-40b7-b720-34a1b4b728e0\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.430291 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdd4d\" (UniqueName: \"kubernetes.io/projected/ac9680e7-7c4a-40b7-b720-34a1b4b728e0-kube-api-access-wdd4d\") pod \"cert-manager-operator-controller-manager-57cd46d6d-vntlz\" (UID: \"ac9680e7-7c4a-40b7-b720-34a1b4b728e0\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.453840 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdd4d\" (UniqueName: \"kubernetes.io/projected/ac9680e7-7c4a-40b7-b720-34a1b4b728e0-kube-api-access-wdd4d\") pod \"cert-manager-operator-controller-manager-57cd46d6d-vntlz\" (UID: \"ac9680e7-7c4a-40b7-b720-34a1b4b728e0\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.510929 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" Oct 03 15:43:20 crc kubenswrapper[5081]: I1003 15:43:20.753202 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz"] Oct 03 15:43:21 crc kubenswrapper[5081]: I1003 15:43:21.359946 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" event={"ID":"ac9680e7-7c4a-40b7-b720-34a1b4b728e0","Type":"ContainerStarted","Data":"cb11734577f2fc2543223e43dee1996937ee41e3b5346adc22e23f746eb42374"} Oct 03 15:43:28 crc kubenswrapper[5081]: I1003 15:43:28.435549 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" event={"ID":"ac9680e7-7c4a-40b7-b720-34a1b4b728e0","Type":"ContainerStarted","Data":"9b25069be7e2fcc8a1e16116d4dff19824520a526efd53c646b1b8c46ea559f8"} Oct 03 15:43:28 crc kubenswrapper[5081]: I1003 15:43:28.459213 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-vntlz" podStartSLOduration=1.2760302110000001 podStartE2EDuration="8.459184137s" podCreationTimestamp="2025-10-03 15:43:20 +0000 UTC" firstStartedPulling="2025-10-03 15:43:20.776479244 +0000 UTC m=+919.742035857" lastFinishedPulling="2025-10-03 15:43:27.95963317 +0000 UTC m=+926.925189783" observedRunningTime="2025-10-03 15:43:28.454676487 +0000 UTC m=+927.420233100" watchObservedRunningTime="2025-10-03 15:43:28.459184137 +0000 UTC m=+927.424740750" Oct 03 15:43:30 crc kubenswrapper[5081]: I1003 15:43:30.648062 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:43:30 crc kubenswrapper[5081]: I1003 15:43:30.648449 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.495290 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-sckqg"] Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.496683 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.499751 5081 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-xpfnc"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.500257 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.500528 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.515917 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-sckqg"]
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.653705 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brhw6\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-kube-api-access-brhw6\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.653850 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-bound-sa-token\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.755720 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-bound-sa-token\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.755820 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brhw6\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-kube-api-access-brhw6\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.782920 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brhw6\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-kube-api-access-brhw6\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.783421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc3d77b5-f460-4dfe-b077-fd580b46f949-bound-sa-token\") pod \"cert-manager-webhook-d969966f-sckqg\" (UID: \"fc3d77b5-f460-4dfe-b077-fd580b46f949\") " pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:32 crc kubenswrapper[5081]: I1003 15:43:32.815130 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:33 crc kubenswrapper[5081]: I1003 15:43:33.081619 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-sckqg"]
Oct 03 15:43:33 crc kubenswrapper[5081]: I1003 15:43:33.475034 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-sckqg" event={"ID":"fc3d77b5-f460-4dfe-b077-fd580b46f949","Type":"ContainerStarted","Data":"d276d7989f8d3ffebf7814f8b20742a4416b86804894a12252cf666f2d986387"}
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.469280 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"]
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.470934 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.473976 5081 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-98nx9"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.477646 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"]
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.578729 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfz9d\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-kube-api-access-mfz9d\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.578792 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.680027 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfz9d\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-kube-api-access-mfz9d\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.680085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.703910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.704880 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfz9d\" (UniqueName: \"kubernetes.io/projected/a4333955-423d-4f67-9863-023ead4ffc0b-kube-api-access-mfz9d\") pod \"cert-manager-cainjector-7d9f95dbf-xf8jt\" (UID: \"a4333955-423d-4f67-9863-023ead4ffc0b\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:34 crc kubenswrapper[5081]: I1003 15:43:34.805896 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"
Oct 03 15:43:35 crc kubenswrapper[5081]: I1003 15:43:35.261872 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt"]
Oct 03 15:43:35 crc kubenswrapper[5081]: I1003 15:43:35.493381 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt" event={"ID":"a4333955-423d-4f67-9863-023ead4ffc0b","Type":"ContainerStarted","Data":"4dabbb520a2ced08b6c3180cca1f58ca997a8f7576aca9fd710deee87bad9f79"}
Oct 03 15:43:37 crc kubenswrapper[5081]: I1003 15:43:37.509105 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-sckqg" event={"ID":"fc3d77b5-f460-4dfe-b077-fd580b46f949","Type":"ContainerStarted","Data":"1738bc8ed85dbd627ced0c8341d1a9c5de7d2937e97fb47e4373c9555631e077"}
Oct 03 15:43:37 crc kubenswrapper[5081]: I1003 15:43:37.509677 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:37 crc kubenswrapper[5081]: I1003 15:43:37.513045 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt" event={"ID":"a4333955-423d-4f67-9863-023ead4ffc0b","Type":"ContainerStarted","Data":"1dbf31117c7d067017c3254a063f7ada89712e24076388c1615b79c88287bbfb"}
Oct 03 15:43:37 crc kubenswrapper[5081]: I1003 15:43:37.530164 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-sckqg" podStartSLOduration=1.355939287 podStartE2EDuration="5.530139832s" podCreationTimestamp="2025-10-03 15:43:32 +0000 UTC" firstStartedPulling="2025-10-03 15:43:33.094865963 +0000 UTC m=+932.060422576" lastFinishedPulling="2025-10-03 15:43:37.269066498 +0000 UTC m=+936.234623121" observedRunningTime="2025-10-03 15:43:37.529448942 +0000 UTC m=+936.495005565" watchObservedRunningTime="2025-10-03 15:43:37.530139832 +0000 UTC m=+936.495696445"
Oct 03 15:43:37 crc kubenswrapper[5081]: I1003 15:43:37.552892 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xf8jt" podStartSLOduration=1.560644328 podStartE2EDuration="3.552872247s" podCreationTimestamp="2025-10-03 15:43:34 +0000 UTC" firstStartedPulling="2025-10-03 15:43:35.282707628 +0000 UTC m=+934.248264241" lastFinishedPulling="2025-10-03 15:43:37.274935547 +0000 UTC m=+936.240492160" observedRunningTime="2025-10-03 15:43:37.548539302 +0000 UTC m=+936.514095935" watchObservedRunningTime="2025-10-03 15:43:37.552872247 +0000 UTC m=+936.518428860"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.501284 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.503337 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.519503 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.651740 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.652256 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.652396 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bhmr\" (UniqueName: \"kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.754571 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bhmr\" (UniqueName: \"kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.754668 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.754712 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.755233 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.755366 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.787010 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bhmr\" (UniqueName: \"kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr\") pod \"certified-operators-sdq74\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") " pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:38 crc kubenswrapper[5081]: I1003 15:43:38.827629 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:39 crc kubenswrapper[5081]: I1003 15:43:39.359394 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:43:39 crc kubenswrapper[5081]: W1003 15:43:39.371967 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9391d1b6_c6e5_4afb_ae9d_16d975d05c7f.slice/crio-1d3eaf319fa2382b7df3a79a17e84fbe6e47ff2bc975006a996dc0dab39a87f5 WatchSource:0}: Error finding container 1d3eaf319fa2382b7df3a79a17e84fbe6e47ff2bc975006a996dc0dab39a87f5: Status 404 returned error can't find the container with id 1d3eaf319fa2382b7df3a79a17e84fbe6e47ff2bc975006a996dc0dab39a87f5
Oct 03 15:43:39 crc kubenswrapper[5081]: I1003 15:43:39.561079 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerStarted","Data":"1d3eaf319fa2382b7df3a79a17e84fbe6e47ff2bc975006a996dc0dab39a87f5"}
Oct 03 15:43:40 crc kubenswrapper[5081]: I1003 15:43:40.570650 5081 generic.go:334] "Generic (PLEG): container finished" podID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerID="05013317ee78c51b8e743017b8329b85479fa8fd678aa94be2727b5ce8e322da" exitCode=0
Oct 03 15:43:40 crc kubenswrapper[5081]: I1003 15:43:40.570804 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerDied","Data":"05013317ee78c51b8e743017b8329b85479fa8fd678aa94be2727b5ce8e322da"}
Oct 03 15:43:41 crc kubenswrapper[5081]: I1003 15:43:41.581723 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerStarted","Data":"eaa0b336def631aedc9472cfa013ca7c65165c61d165fa427d1e0c8316046871"}
Oct 03 15:43:42 crc kubenswrapper[5081]: I1003 15:43:42.592044 5081 generic.go:334] "Generic (PLEG): container finished" podID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerID="eaa0b336def631aedc9472cfa013ca7c65165c61d165fa427d1e0c8316046871" exitCode=0
Oct 03 15:43:42 crc kubenswrapper[5081]: I1003 15:43:42.592119 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerDied","Data":"eaa0b336def631aedc9472cfa013ca7c65165c61d165fa427d1e0c8316046871"}
Oct 03 15:43:42 crc kubenswrapper[5081]: I1003 15:43:42.819860 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-sckqg"
Oct 03 15:43:48 crc kubenswrapper[5081]: I1003 15:43:48.645478 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerStarted","Data":"0c6c97dd4c1f697480e2194f1902be3ba1b0a5c62c735b4d9e360cb889861b11"}
Oct 03 15:43:48 crc kubenswrapper[5081]: I1003 15:43:48.672313 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sdq74" podStartSLOduration=3.8199522679999998 podStartE2EDuration="10.672254218s" podCreationTimestamp="2025-10-03 15:43:38 +0000 UTC" firstStartedPulling="2025-10-03 15:43:40.573721691 +0000 UTC m=+939.539278304" lastFinishedPulling="2025-10-03 15:43:47.426023651 +0000 UTC m=+946.391580254" observedRunningTime="2025-10-03 15:43:48.670895849 +0000 UTC m=+947.636452482" watchObservedRunningTime="2025-10-03 15:43:48.672254218 +0000 UTC m=+947.637810871"
Oct 03 15:43:48 crc kubenswrapper[5081]: I1003 15:43:48.828735 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:48 crc kubenswrapper[5081]: I1003 15:43:48.828788 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:43:49 crc kubenswrapper[5081]: I1003 15:43:49.883170 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-sdq74" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="registry-server" probeResult="failure" output=<
Oct 03 15:43:49 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s
Oct 03 15:43:49 crc kubenswrapper[5081]: >
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.133002 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-q4mtf"]
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.135715 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf"
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.138421 5081 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-t6csr"
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.139716 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-q4mtf"]
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.239323 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcxhn\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-kube-api-access-rcxhn\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf"
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.239391 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf"
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.341048 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcxhn\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-kube-api-access-rcxhn\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf"
Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.341151 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf"
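The Startup probe failure above reports output 'timeout: failed to connect service ":50051" within 1s', meaning the registry-server probe could not reach its gRPC port inside a one-second budget. A rough Python stand-in for that kind of check, using a plain TCP connect rather than the real gRPC health protocol (host defaulted to localhost; port and timeout taken from the log output, everything else illustrative):

import socket

def tcp_probe(host: str, port: int, timeout_s: float = 1.0) -> bool:
    """Return True if a TCP connection succeeds within timeout_s."""
    try:
        with socket.create_connection((host, port), timeout=timeout_s):
            return True
    except OSError:
        return False

# The probe output above suggests port 50051 with a 1s budget.
print("healthy" if tcp_probe("127.0.0.1", 50051, 1.0) else "unhealthy")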
\"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.367777 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.369478 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcxhn\" (UniqueName: \"kubernetes.io/projected/a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8-kube-api-access-rcxhn\") pod \"cert-manager-7d4cc89fcb-q4mtf\" (UID: \"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8\") " pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.472700 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" Oct 03 15:43:50 crc kubenswrapper[5081]: I1003 15:43:50.760024 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-q4mtf"] Oct 03 15:43:50 crc kubenswrapper[5081]: W1003 15:43:50.769432 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6c6ba02_ba1c_4143_a6a1_86b6c3b406f8.slice/crio-52f18acffeb20ce18f92f01f39e481b836c07c254e7e38219e63b8de70221f75 WatchSource:0}: Error finding container 52f18acffeb20ce18f92f01f39e481b836c07c254e7e38219e63b8de70221f75: Status 404 returned error can't find the container with id 52f18acffeb20ce18f92f01f39e481b836c07c254e7e38219e63b8de70221f75 Oct 03 15:43:51 crc kubenswrapper[5081]: I1003 15:43:51.700696 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" event={"ID":"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8","Type":"ContainerStarted","Data":"82535922dc14a9b42d88d945cb1ced68c1a413db90ad83dd902c03a2aa6a1cc7"} Oct 03 15:43:51 crc kubenswrapper[5081]: I1003 15:43:51.701133 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" event={"ID":"a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8","Type":"ContainerStarted","Data":"52f18acffeb20ce18f92f01f39e481b836c07c254e7e38219e63b8de70221f75"} Oct 03 15:43:51 crc kubenswrapper[5081]: I1003 15:43:51.732846 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-q4mtf" podStartSLOduration=1.732814768 podStartE2EDuration="1.732814768s" podCreationTimestamp="2025-10-03 15:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:43:51.722262833 +0000 UTC m=+950.687819436" watchObservedRunningTime="2025-10-03 15:43:51.732814768 +0000 UTC m=+950.698371381" Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.870246 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"] Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.873037 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jqhlc" Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.891635 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.898941 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"] Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.899603 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.900145 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-wtlb2" Oct 03 15:43:56 crc kubenswrapper[5081]: I1003 15:43:56.956790 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwsxt\" (UniqueName: \"kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt\") pod \"openstack-operator-index-jqhlc\" (UID: \"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7\") " pod="openstack-operators/openstack-operator-index-jqhlc" Oct 03 15:43:57 crc kubenswrapper[5081]: I1003 15:43:57.058617 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwsxt\" (UniqueName: \"kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt\") pod \"openstack-operator-index-jqhlc\" (UID: \"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7\") " pod="openstack-operators/openstack-operator-index-jqhlc" Oct 03 15:43:57 crc kubenswrapper[5081]: I1003 15:43:57.079243 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwsxt\" (UniqueName: \"kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt\") pod \"openstack-operator-index-jqhlc\" (UID: \"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7\") " pod="openstack-operators/openstack-operator-index-jqhlc" Oct 03 15:43:57 crc kubenswrapper[5081]: I1003 15:43:57.203109 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jqhlc" Oct 03 15:43:57 crc kubenswrapper[5081]: I1003 15:43:57.666856 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"] Oct 03 15:43:57 crc kubenswrapper[5081]: I1003 15:43:57.746502 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jqhlc" event={"ID":"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7","Type":"ContainerStarted","Data":"e00ddeae9fee2eebd1c7284b15f8720d29a7285d0a3f0e6fdbc5c3dd7d36b507"} Oct 03 15:43:58 crc kubenswrapper[5081]: I1003 15:43:58.756822 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jqhlc" event={"ID":"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7","Type":"ContainerStarted","Data":"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"} Oct 03 15:43:58 crc kubenswrapper[5081]: I1003 15:43:58.779839 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jqhlc" podStartSLOduration=1.928333607 podStartE2EDuration="2.779815587s" podCreationTimestamp="2025-10-03 15:43:56 +0000 UTC" firstStartedPulling="2025-10-03 15:43:57.681579785 +0000 UTC m=+956.647136438" lastFinishedPulling="2025-10-03 15:43:58.533061805 +0000 UTC m=+957.498618418" observedRunningTime="2025-10-03 15:43:58.775728329 +0000 UTC m=+957.741284962" watchObservedRunningTime="2025-10-03 15:43:58.779815587 +0000 UTC m=+957.745372200" Oct 03 15:43:58 crc kubenswrapper[5081]: I1003 15:43:58.872666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sdq74" Oct 03 15:43:58 crc kubenswrapper[5081]: I1003 15:43:58.920663 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sdq74" Oct 03 15:44:00 crc kubenswrapper[5081]: I1003 15:44:00.624733 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"] Oct 03 15:44:00 crc kubenswrapper[5081]: I1003 15:44:00.647833 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:44:00 crc kubenswrapper[5081]: I1003 15:44:00.647920 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:44:00 crc kubenswrapper[5081]: I1003 15:44:00.771461 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jqhlc" podUID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" containerName="registry-server" containerID="cri-o://402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994" gracePeriod=2 Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.155807 5081 util.go:48] "No ready sandbox for pod can be found. 
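The "Killing container with a grace period" record above (gracePeriod=2) reflects the usual termination contract: the runtime delivers SIGTERM, waits out the grace period, then escalates to SIGKILL. A minimal Python sketch of that pattern for an ordinary child process, not CRI-O's actual implementation:

import signal
import subprocess

def kill_with_grace(proc: subprocess.Popen, grace_s: float = 2.0) -> int:
    """SIGTERM first; SIGKILL only if the process outlives the grace period."""
    proc.send_signal(signal.SIGTERM)
    try:
        return proc.wait(timeout=grace_s)
    except subprocess.TimeoutExpired:
        proc.kill()  # escalate after the grace period, as gracePeriod=2 implies
        return proc.wait()

p = subprocess.Popen(["sleep", "60"])
print("exit code:", kill_with_grace(p, grace_s=2.0))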
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.224758 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwsxt\" (UniqueName: \"kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt\") pod \"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7\" (UID: \"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7\") "
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.232770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt" (OuterVolumeSpecName: "kube-api-access-hwsxt") pod "eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" (UID: "eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7"). InnerVolumeSpecName "kube-api-access-hwsxt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.327250 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwsxt\" (UniqueName: \"kubernetes.io/projected/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7-kube-api-access-hwsxt\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.440645 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-z7szh"]
Oct 03 15:44:01 crc kubenswrapper[5081]: E1003 15:44:01.441081 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" containerName="registry-server"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.441101 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" containerName="registry-server"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.441251 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" containerName="registry-server"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.442041 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.448833 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z7szh"]
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.532679 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brv6v\" (UniqueName: \"kubernetes.io/projected/adf64075-5998-49ce-99b2-a34e10a163ad-kube-api-access-brv6v\") pod \"openstack-operator-index-z7szh\" (UID: \"adf64075-5998-49ce-99b2-a34e10a163ad\") " pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.634950 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brv6v\" (UniqueName: \"kubernetes.io/projected/adf64075-5998-49ce-99b2-a34e10a163ad-kube-api-access-brv6v\") pod \"openstack-operator-index-z7szh\" (UID: \"adf64075-5998-49ce-99b2-a34e10a163ad\") " pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.672868 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brv6v\" (UniqueName: \"kubernetes.io/projected/adf64075-5998-49ce-99b2-a34e10a163ad-kube-api-access-brv6v\") pod \"openstack-operator-index-z7szh\" (UID: \"adf64075-5998-49ce-99b2-a34e10a163ad\") " pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.782552 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" containerID="402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994" exitCode=0
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.782663 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jqhlc" event={"ID":"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7","Type":"ContainerDied","Data":"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"}
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.782661 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jqhlc"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.782702 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jqhlc" event={"ID":"eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7","Type":"ContainerDied","Data":"e00ddeae9fee2eebd1c7284b15f8720d29a7285d0a3f0e6fdbc5c3dd7d36b507"}
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.782726 5081 scope.go:117] "RemoveContainer" containerID="402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.807172 5081 scope.go:117] "RemoveContainer" containerID="402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"
Oct 03 15:44:01 crc kubenswrapper[5081]: E1003 15:44:01.808527 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994\": container with ID starting with 402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994 not found: ID does not exist" containerID="402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.808586 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994"} err="failed to get container status \"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994\": rpc error: code = NotFound desc = could not find container \"402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994\": container with ID starting with 402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994 not found: ID does not exist"
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.810299 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z7szh"
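The RemoveContainer exchange above shows the kubelet tolerating a NotFound from the runtime: the container was already gone when its status was queried, the error is logged, and cleanup continues. The usual way to get that behavior is to make removal idempotent; a small Python sketch with a hypothetical runtime client (names are illustrative, not a real CRI API):

class NotFoundError(Exception):
    """Stand-in for the gRPC NotFound status seen in the log."""

class FakeRuntime:
    """Hypothetical runtime client; the real kubelet talks CRI over gRPC."""
    def remove(self, container_id: str) -> None:
        raise NotFoundError(f"container {container_id!r} not found")

def remove_container(runtime, container_id: str) -> None:
    """Idempotent removal: a container that is already gone counts as removed."""
    try:
        runtime.remove(container_id)
    except NotFoundError:
        pass  # same outcome the kubelet settles on above

remove_container(FakeRuntime(), "402a39dcf9f2ac0118422eb6258a606f53fd2fec22fc2e01162adbf5ff917994")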
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.847019 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"]
Oct 03 15:44:01 crc kubenswrapper[5081]: I1003 15:44:01.847063 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jqhlc"]
Oct 03 15:44:02 crc kubenswrapper[5081]: I1003 15:44:02.274374 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z7szh"]
Oct 03 15:44:02 crc kubenswrapper[5081]: I1003 15:44:02.799749 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z7szh" event={"ID":"adf64075-5998-49ce-99b2-a34e10a163ad","Type":"ContainerStarted","Data":"950dc82073ead226f4ce90883ab673bdafc65747519554fd954686c6d16c91da"}
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.626948 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.627749 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sdq74" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="registry-server" containerID="cri-o://0c6c97dd4c1f697480e2194f1902be3ba1b0a5c62c735b4d9e360cb889861b11" gracePeriod=2
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.814931 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z7szh" event={"ID":"adf64075-5998-49ce-99b2-a34e10a163ad","Type":"ContainerStarted","Data":"b2b3b9c64286a32a70ccad5a16d699b4a4e790cd35c200ba5477eb0e3b3db922"}
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.824598 5081 generic.go:334] "Generic (PLEG): container finished" podID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerID="0c6c97dd4c1f697480e2194f1902be3ba1b0a5c62c735b4d9e360cb889861b11" exitCode=0
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.824668 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerDied","Data":"0c6c97dd4c1f697480e2194f1902be3ba1b0a5c62c735b4d9e360cb889861b11"}
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.833031 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-z7szh" podStartSLOduration=2.383063966 podStartE2EDuration="2.833010544s" podCreationTimestamp="2025-10-03 15:44:01 +0000 UTC" firstStartedPulling="2025-10-03 15:44:02.28847672 +0000 UTC m=+961.254033353" lastFinishedPulling="2025-10-03 15:44:02.738423288 +0000 UTC m=+961.703979931" observedRunningTime="2025-10-03 15:44:03.831536431 +0000 UTC m=+962.797093044" watchObservedRunningTime="2025-10-03 15:44:03.833010544 +0000 UTC m=+962.798567157"
Oct 03 15:44:03 crc kubenswrapper[5081]: I1003 15:44:03.844704 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7" path="/var/lib/kubelet/pods/eb9fa3f7-0d6b-4310-8e1c-bec2cfca86f7/volumes"
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.024820 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.096175 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bhmr\" (UniqueName: \"kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr\") pod \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") "
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.096318 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content\") pod \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") "
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.096372 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities\") pod \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\" (UID: \"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f\") "
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.097394 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities" (OuterVolumeSpecName: "utilities") pod "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" (UID: "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.111628 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr" (OuterVolumeSpecName: "kube-api-access-8bhmr") pod "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" (UID: "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f"). InnerVolumeSpecName "kube-api-access-8bhmr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.153820 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" (UID: "9391d1b6-c6e5-4afb-ae9d-16d975d05c7f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.198492 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bhmr\" (UniqueName: \"kubernetes.io/projected/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-kube-api-access-8bhmr\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.198536 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.198546 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.836165 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sdq74" event={"ID":"9391d1b6-c6e5-4afb-ae9d-16d975d05c7f","Type":"ContainerDied","Data":"1d3eaf319fa2382b7df3a79a17e84fbe6e47ff2bc975006a996dc0dab39a87f5"}
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.836214 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sdq74"
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.836302 5081 scope.go:117] "RemoveContainer" containerID="0c6c97dd4c1f697480e2194f1902be3ba1b0a5c62c735b4d9e360cb889861b11"
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.863748 5081 scope.go:117] "RemoveContainer" containerID="eaa0b336def631aedc9472cfa013ca7c65165c61d165fa427d1e0c8316046871"
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.877137 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.885629 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sdq74"]
Oct 03 15:44:04 crc kubenswrapper[5081]: I1003 15:44:04.895667 5081 scope.go:117] "RemoveContainer" containerID="05013317ee78c51b8e743017b8329b85479fa8fd678aa94be2727b5ce8e322da"
Oct 03 15:44:05 crc kubenswrapper[5081]: I1003 15:44:05.856413 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" path="/var/lib/kubelet/pods/9391d1b6-c6e5-4afb-ae9d-16d975d05c7f/volumes"
Oct 03 15:44:11 crc kubenswrapper[5081]: I1003 15:44:11.811132 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:11 crc kubenswrapper[5081]: I1003 15:44:11.812401 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:11 crc kubenswrapper[5081]: I1003 15:44:11.868077 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:11 crc kubenswrapper[5081]: I1003 15:44:11.961327 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-z7szh"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.680444 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"]
Oct 03 15:44:18 crc kubenswrapper[5081]: E1003 15:44:18.682032 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="extract-utilities"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.682064 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="extract-utilities"
Oct 03 15:44:18 crc kubenswrapper[5081]: E1003 15:44:18.682100 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="extract-content"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.682116 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="extract-content"
Oct 03 15:44:18 crc kubenswrapper[5081]: E1003 15:44:18.682146 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="registry-server"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.682161 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="registry-server"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.682425 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9391d1b6-c6e5-4afb-ae9d-16d975d05c7f" containerName="registry-server"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.684472 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.688583 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-p54n6"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.690625 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"]
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.738344 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.738718 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h2r4\" (UniqueName: \"kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.738853 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.840221 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h2r4\" (UniqueName: \"kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.840320 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.840407 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.841382 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.841382 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:18 crc kubenswrapper[5081]: I1003 15:44:18.874728 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h2r4\" (UniqueName: \"kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4\") pod \"00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") " pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:19 crc kubenswrapper[5081]: I1003 15:44:19.012180 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:19 crc kubenswrapper[5081]: I1003 15:44:19.226487 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"]
Oct 03 15:44:19 crc kubenswrapper[5081]: I1003 15:44:19.984282 5081 generic.go:334] "Generic (PLEG): container finished" podID="35d957f7-e16f-4144-b31d-db3861d30081" containerID="2ca25c570a7ea366825ed57e9233c2ee874b3604ea894274f848d9fe62247c96" exitCode=0
Oct 03 15:44:19 crc kubenswrapper[5081]: I1003 15:44:19.984469 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2" event={"ID":"35d957f7-e16f-4144-b31d-db3861d30081","Type":"ContainerDied","Data":"2ca25c570a7ea366825ed57e9233c2ee874b3604ea894274f848d9fe62247c96"}
Oct 03 15:44:19 crc kubenswrapper[5081]: I1003 15:44:19.984933 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2" event={"ID":"35d957f7-e16f-4144-b31d-db3861d30081","Type":"ContainerStarted","Data":"9131c00c3e7beb4eaccc56627e6b89dcedca29b374b241071e679770f39ef5c4"}
Oct 03 15:44:20 crc kubenswrapper[5081]: I1003 15:44:20.993630 5081 generic.go:334] "Generic (PLEG): container finished" podID="35d957f7-e16f-4144-b31d-db3861d30081" containerID="32892e971e95576f98ec2f7ed736a4ebfdcb7cba94b6fc48aace88a4601c4ddc" exitCode=0
Oct 03 15:44:20 crc kubenswrapper[5081]: I1003 15:44:20.994049 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2" event={"ID":"35d957f7-e16f-4144-b31d-db3861d30081","Type":"ContainerDied","Data":"32892e971e95576f98ec2f7ed736a4ebfdcb7cba94b6fc48aace88a4601c4ddc"}
Oct 03 15:44:22 crc kubenswrapper[5081]: I1003 15:44:22.003100 5081 generic.go:334] "Generic (PLEG): container finished" podID="35d957f7-e16f-4144-b31d-db3861d30081" containerID="5596a31df98caa77f591e9f8a484dc30d70c9f2fce72058dc39f1bb9a683f0d9" exitCode=0
Oct 03 15:44:22 crc kubenswrapper[5081]: I1003 15:44:22.003158 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2" event={"ID":"35d957f7-e16f-4144-b31d-db3861d30081","Type":"ContainerDied","Data":"5596a31df98caa77f591e9f8a484dc30d70c9f2fce72058dc39f1bb9a683f0d9"}
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.343633 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
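The three "container finished ... exitCode=0" and ContainerDied pairs above are the bundle pod's steps completing in sequence. Building on the parser idea above, the finished events and their exit codes can be collected with one more regular expression (illustrative, matched against the record text seen here):

import re

FINISHED = re.compile(r'container finished" podID="(?P<pod>[0-9a-f-]+)" '
                      r'containerID="(?P<cid>[0-9a-f]+)" exitCode=(?P<code>-?\d+)')

def finished_containers(lines):
    """Yield (podID, containerID, exitCode) for each PLEG 'container finished' record."""
    for line in lines:
        m = FINISHED.search(line)
        if m:
            yield m.group("pod"), m.group("cid"), int(m.group("code"))

sample = ('Oct 03 15:44:22 crc kubenswrapper[5081]: I1003 15:44:22.003100 5081 '
          'generic.go:334] "Generic (PLEG): container finished" '
          'podID="35d957f7-e16f-4144-b31d-db3861d30081" '
          'containerID="5596a31df98caa77f591e9f8a484dc30d70c9f2fce72058dc39f1bb9a683f0d9" exitCode=0')
print(list(finished_containers([sample])))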
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.417812 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h2r4\" (UniqueName: \"kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4\") pod \"35d957f7-e16f-4144-b31d-db3861d30081\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") "
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.418682 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle\") pod \"35d957f7-e16f-4144-b31d-db3861d30081\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") "
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.418757 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util\") pod \"35d957f7-e16f-4144-b31d-db3861d30081\" (UID: \"35d957f7-e16f-4144-b31d-db3861d30081\") "
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.419536 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle" (OuterVolumeSpecName: "bundle") pod "35d957f7-e16f-4144-b31d-db3861d30081" (UID: "35d957f7-e16f-4144-b31d-db3861d30081"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.424722 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4" (OuterVolumeSpecName: "kube-api-access-2h2r4") pod "35d957f7-e16f-4144-b31d-db3861d30081" (UID: "35d957f7-e16f-4144-b31d-db3861d30081"). InnerVolumeSpecName "kube-api-access-2h2r4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.439797 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util" (OuterVolumeSpecName: "util") pod "35d957f7-e16f-4144-b31d-db3861d30081" (UID: "35d957f7-e16f-4144-b31d-db3861d30081"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.521352 5081 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.521461 5081 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35d957f7-e16f-4144-b31d-db3861d30081-util\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:23 crc kubenswrapper[5081]: I1003 15:44:23.521482 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h2r4\" (UniqueName: \"kubernetes.io/projected/35d957f7-e16f-4144-b31d-db3861d30081-kube-api-access-2h2r4\") on node \"crc\" DevicePath \"\""
Oct 03 15:44:24 crc kubenswrapper[5081]: I1003 15:44:24.022740 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2" event={"ID":"35d957f7-e16f-4144-b31d-db3861d30081","Type":"ContainerDied","Data":"9131c00c3e7beb4eaccc56627e6b89dcedca29b374b241071e679770f39ef5c4"}
Oct 03 15:44:24 crc kubenswrapper[5081]: I1003 15:44:24.022807 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9131c00c3e7beb4eaccc56627e6b89dcedca29b374b241071e679770f39ef5c4"
Oct 03 15:44:24 crc kubenswrapper[5081]: I1003 15:44:24.022816 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2"
Oct 03 15:44:30 crc kubenswrapper[5081]: I1003 15:44:30.647936 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 15:44:30 crc kubenswrapper[5081]: I1003 15:44:30.650457 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 15:44:30 crc kubenswrapper[5081]: I1003 15:44:30.650664 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 15:44:30 crc kubenswrapper[5081]: I1003 15:44:30.651685 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 15:44:30 crc kubenswrapper[5081]: I1003 15:44:30.651861 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5" gracePeriod=600
Oct 03 15:44:31 crc kubenswrapper[5081]: I1003 15:44:31.074105 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5" exitCode=0
Oct 03 15:44:31 crc kubenswrapper[5081]: I1003 15:44:31.074184 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5"}
Oct 03 15:44:31 crc kubenswrapper[5081]: I1003 15:44:31.074693 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979"}
Oct 03 15:44:31 crc kubenswrapper[5081]: I1003 15:44:31.074722 5081 scope.go:117] "RemoveContainer" containerID="a97bc62e9210356568f624b3588b885554295304e0cb244f570d3f9d71dc1ba9"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.967866 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92"]
Oct 03 15:44:45 crc kubenswrapper[5081]: E1003 15:44:45.968983 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="extract"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.969002 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="extract"
Oct 03 15:44:45 crc kubenswrapper[5081]: E1003 15:44:45.969015 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="util"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.969023 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="util"
Oct 03 15:44:45 crc kubenswrapper[5081]: E1003 15:44:45.969039 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="pull"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.969047 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="pull"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.969196 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d957f7-e16f-4144-b31d-db3861d30081" containerName="extract"
Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.970081 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92"
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.979333 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-kj5vg" Oct 03 15:44:45 crc kubenswrapper[5081]: I1003 15:44:45.999262 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92"] Oct 03 15:44:46 crc kubenswrapper[5081]: I1003 15:44:46.062935 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r27pn\" (UniqueName: \"kubernetes.io/projected/ba803456-8067-4e6c-8233-e6293b2977d7-kube-api-access-r27pn\") pod \"openstack-operator-controller-operator-86f8d7b75f-4qd92\" (UID: \"ba803456-8067-4e6c-8233-e6293b2977d7\") " pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:46 crc kubenswrapper[5081]: I1003 15:44:46.164135 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r27pn\" (UniqueName: \"kubernetes.io/projected/ba803456-8067-4e6c-8233-e6293b2977d7-kube-api-access-r27pn\") pod \"openstack-operator-controller-operator-86f8d7b75f-4qd92\" (UID: \"ba803456-8067-4e6c-8233-e6293b2977d7\") " pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:46 crc kubenswrapper[5081]: I1003 15:44:46.198799 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r27pn\" (UniqueName: \"kubernetes.io/projected/ba803456-8067-4e6c-8233-e6293b2977d7-kube-api-access-r27pn\") pod \"openstack-operator-controller-operator-86f8d7b75f-4qd92\" (UID: \"ba803456-8067-4e6c-8233-e6293b2977d7\") " pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:46 crc kubenswrapper[5081]: I1003 15:44:46.294455 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:46 crc kubenswrapper[5081]: I1003 15:44:46.726247 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92"] Oct 03 15:44:47 crc kubenswrapper[5081]: I1003 15:44:47.192324 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" event={"ID":"ba803456-8067-4e6c-8233-e6293b2977d7","Type":"ContainerStarted","Data":"2155f8fef687a0df4d8991e9442bbddc26cbda1cb411b27f3fe7e7cbc2d77a8d"} Oct 03 15:44:51 crc kubenswrapper[5081]: I1003 15:44:51.230134 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" event={"ID":"ba803456-8067-4e6c-8233-e6293b2977d7","Type":"ContainerStarted","Data":"fab2e1ce61dc2e935cfa4013d777ca765404b95f3dae4d5862c6c88afce35556"} Oct 03 15:44:53 crc kubenswrapper[5081]: I1003 15:44:53.247797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" event={"ID":"ba803456-8067-4e6c-8233-e6293b2977d7","Type":"ContainerStarted","Data":"da3ef7b804a00b1383704c9d3b6872214f4f02adbbea7ae70441ac6cf20dc767"} Oct 03 15:44:53 crc kubenswrapper[5081]: I1003 15:44:53.249739 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:44:53 crc kubenswrapper[5081]: I1003 15:44:53.289130 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" podStartSLOduration=2.251096884 podStartE2EDuration="8.289101693s" podCreationTimestamp="2025-10-03 15:44:45 +0000 UTC" firstStartedPulling="2025-10-03 15:44:46.734330597 +0000 UTC m=+1005.699887210" lastFinishedPulling="2025-10-03 15:44:52.772335406 +0000 UTC m=+1011.737892019" observedRunningTime="2025-10-03 15:44:53.282928905 +0000 UTC m=+1012.248485518" watchObservedRunningTime="2025-10-03 15:44:53.289101693 +0000 UTC m=+1012.254658336" Oct 03 15:44:56 crc kubenswrapper[5081]: I1003 15:44:56.298584 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-86f8d7b75f-4qd92" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.152141 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs"] Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.153613 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.156433 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.157072 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.180654 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs"] Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.203353 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtchg\" (UniqueName: \"kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.203441 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.203606 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.305449 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.305501 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.305581 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtchg\" (UniqueName: \"kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.306476 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume\") pod 
\"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.313515 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.323472 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtchg\" (UniqueName: \"kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg\") pod \"collect-profiles-29325105-6t6hs\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.494501 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:00 crc kubenswrapper[5081]: I1003 15:45:00.751644 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs"] Oct 03 15:45:01 crc kubenswrapper[5081]: I1003 15:45:01.309709 5081 generic.go:334] "Generic (PLEG): container finished" podID="c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" containerID="8ada1bf2fbde3ad868bd396a3986916612c1804e08ddb3c8c21cc1cf824a1007" exitCode=0 Oct 03 15:45:01 crc kubenswrapper[5081]: I1003 15:45:01.310230 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" event={"ID":"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb","Type":"ContainerDied","Data":"8ada1bf2fbde3ad868bd396a3986916612c1804e08ddb3c8c21cc1cf824a1007"} Oct 03 15:45:01 crc kubenswrapper[5081]: I1003 15:45:01.310286 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" event={"ID":"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb","Type":"ContainerStarted","Data":"73ab7cabdfab9611c533391af199d9338f639482c0c7f2d2c82bffd71ace1c9d"} Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.646374 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.744473 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume\") pod \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.744655 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume\") pod \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.744764 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtchg\" (UniqueName: \"kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg\") pod \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\" (UID: \"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb\") " Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.745943 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume" (OuterVolumeSpecName: "config-volume") pod "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" (UID: "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.751465 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg" (OuterVolumeSpecName: "kube-api-access-qtchg") pod "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" (UID: "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb"). InnerVolumeSpecName "kube-api-access-qtchg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.753478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" (UID: "c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.846803 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtchg\" (UniqueName: \"kubernetes.io/projected/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-kube-api-access-qtchg\") on node \"crc\" DevicePath \"\"" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.846868 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:45:02 crc kubenswrapper[5081]: I1003 15:45:02.846882 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 15:45:03 crc kubenswrapper[5081]: I1003 15:45:03.327491 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" event={"ID":"c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb","Type":"ContainerDied","Data":"73ab7cabdfab9611c533391af199d9338f639482c0c7f2d2c82bffd71ace1c9d"} Oct 03 15:45:03 crc kubenswrapper[5081]: I1003 15:45:03.327539 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73ab7cabdfab9611c533391af199d9338f639482c0c7f2d2c82bffd71ace1c9d" Oct 03 15:45:03 crc kubenswrapper[5081]: I1003 15:45:03.327672 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.457010 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45"] Oct 03 15:45:16 crc kubenswrapper[5081]: E1003 15:45:16.458147 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" containerName="collect-profiles" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.458168 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" containerName="collect-profiles" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.458310 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" containerName="collect-profiles" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.459114 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.464677 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-79h7v" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.473643 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.474981 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.478043 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-rw7wt" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.485908 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.500690 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.501879 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.517671 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-sgsrg" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.524657 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.530678 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.531818 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.534934 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-nm62h" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.543915 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.561263 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.575032 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.576624 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.582851 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-vpf6d" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.597187 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.611011 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.612354 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.617335 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-7tc88" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.617607 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.641637 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.643068 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.647292 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.647637 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-r9vf2" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.653685 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.656683 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgjth\" (UniqueName: \"kubernetes.io/projected/4bea9891-fd7e-44ed-9af7-868cb55a9a59-kube-api-access-bgjth\") pod \"glance-operator-controller-manager-d785ddfd5-mqj79\" (UID: \"4bea9891-fd7e-44ed-9af7-868cb55a9a59\") " pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.656746 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nwhg\" (UniqueName: \"kubernetes.io/projected/7d518e4e-beff-4962-83a3-e4147b2cefed-kube-api-access-7nwhg\") pod \"cinder-operator-controller-manager-8686fd99f7-cpmwx\" (UID: \"7d518e4e-beff-4962-83a3-e4147b2cefed\") " pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.656788 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75lpz\" (UniqueName: \"kubernetes.io/projected/e762bb01-e884-43df-afe3-2c4bc45136a8-kube-api-access-75lpz\") pod \"barbican-operator-controller-manager-6d6d64fdcf-xfg45\" (UID: \"e762bb01-e884-43df-afe3-2c4bc45136a8\") " pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.656839 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kps75\" (UniqueName: \"kubernetes.io/projected/fa557e19-6921-4cb5-88b0-10ee3093201c-kube-api-access-kps75\") pod \"designate-operator-controller-manager-58d86cd59d-8pt7x\" (UID: \"fa557e19-6921-4cb5-88b0-10ee3093201c\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.696869 5081 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.748441 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.754065 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vq42n" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.759969 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761477 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwtgf\" (UniqueName: \"kubernetes.io/projected/5140942d-8224-4889-b650-7ebcd0ce93a1-kube-api-access-dwtgf\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761536 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgjth\" (UniqueName: \"kubernetes.io/projected/4bea9891-fd7e-44ed-9af7-868cb55a9a59-kube-api-access-bgjth\") pod \"glance-operator-controller-manager-d785ddfd5-mqj79\" (UID: \"4bea9891-fd7e-44ed-9af7-868cb55a9a59\") " pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761597 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nwhg\" (UniqueName: \"kubernetes.io/projected/7d518e4e-beff-4962-83a3-e4147b2cefed-kube-api-access-7nwhg\") pod \"cinder-operator-controller-manager-8686fd99f7-cpmwx\" (UID: \"7d518e4e-beff-4962-83a3-e4147b2cefed\") " pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761647 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75lpz\" (UniqueName: \"kubernetes.io/projected/e762bb01-e884-43df-afe3-2c4bc45136a8-kube-api-access-75lpz\") pod \"barbican-operator-controller-manager-6d6d64fdcf-xfg45\" (UID: \"e762bb01-e884-43df-afe3-2c4bc45136a8\") " pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761690 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761713 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kps75\" (UniqueName: \"kubernetes.io/projected/fa557e19-6921-4cb5-88b0-10ee3093201c-kube-api-access-kps75\") pod \"designate-operator-controller-manager-58d86cd59d-8pt7x\" (UID: \"fa557e19-6921-4cb5-88b0-10ee3093201c\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761735 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5fh4\" (UniqueName: \"kubernetes.io/projected/717d0fb4-cd0f-42b2-aca0-47e6166fe5d0-kube-api-access-j5fh4\") pod \"horizon-operator-controller-manager-586b66cf4f-t8jvq\" (UID: \"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0\") " pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761769 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl9nm\" (UniqueName: \"kubernetes.io/projected/e305740c-d1a1-4150-ab8f-0742d6a50db3-kube-api-access-hl9nm\") pod \"heat-operator-controller-manager-5ffbdb7ddf-s5dz2\" (UID: \"e305740c-d1a1-4150-ab8f-0742d6a50db3\") " pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.761849 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.769197 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-5kjqm" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.811771 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kps75\" (UniqueName: \"kubernetes.io/projected/fa557e19-6921-4cb5-88b0-10ee3093201c-kube-api-access-kps75\") pod \"designate-operator-controller-manager-58d86cd59d-8pt7x\" (UID: \"fa557e19-6921-4cb5-88b0-10ee3093201c\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.811852 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.812662 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nwhg\" (UniqueName: \"kubernetes.io/projected/7d518e4e-beff-4962-83a3-e4147b2cefed-kube-api-access-7nwhg\") pod \"cinder-operator-controller-manager-8686fd99f7-cpmwx\" (UID: \"7d518e4e-beff-4962-83a3-e4147b2cefed\") " pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.818671 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75lpz\" (UniqueName: \"kubernetes.io/projected/e762bb01-e884-43df-afe3-2c4bc45136a8-kube-api-access-75lpz\") pod \"barbican-operator-controller-manager-6d6d64fdcf-xfg45\" (UID: \"e762bb01-e884-43df-afe3-2c4bc45136a8\") " pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.823068 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.838513 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.839525 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgjth\" (UniqueName: \"kubernetes.io/projected/4bea9891-fd7e-44ed-9af7-868cb55a9a59-kube-api-access-bgjth\") pod \"glance-operator-controller-manager-d785ddfd5-mqj79\" (UID: \"4bea9891-fd7e-44ed-9af7-868cb55a9a59\") " pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.853238 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.854741 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.861514 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.863807 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwtgf\" (UniqueName: \"kubernetes.io/projected/5140942d-8224-4889-b650-7ebcd0ce93a1-kube-api-access-dwtgf\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.863935 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmzx5\" (UniqueName: \"kubernetes.io/projected/1f197349-94d5-4ef3-962b-89045495d0c9-kube-api-access-hmzx5\") pod \"keystone-operator-controller-manager-6c9969c6c6-qsnkt\" (UID: \"1f197349-94d5-4ef3-962b-89045495d0c9\") " pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.864031 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwbq4\" (UniqueName: \"kubernetes.io/projected/a4711b00-4b88-47ef-9d5b-c01b57ac9b18-kube-api-access-mwbq4\") pod \"ironic-operator-controller-manager-59b5fc9845-fxw8p\" (UID: \"a4711b00-4b88-47ef-9d5b-c01b57ac9b18\") " pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.864109 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.864192 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5fh4\" (UniqueName: \"kubernetes.io/projected/717d0fb4-cd0f-42b2-aca0-47e6166fe5d0-kube-api-access-j5fh4\") pod \"horizon-operator-controller-manager-586b66cf4f-t8jvq\" (UID: 
\"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0\") " pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.864275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl9nm\" (UniqueName: \"kubernetes.io/projected/e305740c-d1a1-4150-ab8f-0742d6a50db3-kube-api-access-hl9nm\") pod \"heat-operator-controller-manager-5ffbdb7ddf-s5dz2\" (UID: \"e305740c-d1a1-4150-ab8f-0742d6a50db3\") " pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:45:16 crc kubenswrapper[5081]: E1003 15:45:16.864828 5081 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 03 15:45:16 crc kubenswrapper[5081]: E1003 15:45:16.864955 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert podName:5140942d-8224-4889-b650-7ebcd0ce93a1 nodeName:}" failed. No retries permitted until 2025-10-03 15:45:17.364935612 +0000 UTC m=+1036.330492225 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert") pod "infra-operator-controller-manager-7c9978f67-sz2f7" (UID: "5140942d-8224-4889-b650-7ebcd0ce93a1") : secret "infra-operator-webhook-server-cert" not found Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.873437 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.898632 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-nhwn9" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.903046 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.914045 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.921755 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.923160 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.936946 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-mmt6d" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.938099 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-8qznf" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.957592 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp"] Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.962720 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwtgf\" (UniqueName: \"kubernetes.io/projected/5140942d-8224-4889-b650-7ebcd0ce93a1-kube-api-access-dwtgf\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.964659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl9nm\" (UniqueName: \"kubernetes.io/projected/e305740c-d1a1-4150-ab8f-0742d6a50db3-kube-api-access-hl9nm\") pod \"heat-operator-controller-manager-5ffbdb7ddf-s5dz2\" (UID: \"e305740c-d1a1-4150-ab8f-0742d6a50db3\") " pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.965706 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9wk7\" (UniqueName: \"kubernetes.io/projected/85db4351-f8f6-436b-9cf1-eb28aa937b21-kube-api-access-b9wk7\") pod \"manila-operator-controller-manager-66fdd975d9-vjfhq\" (UID: \"85db4351-f8f6-436b-9cf1-eb28aa937b21\") " pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.970535 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmzx5\" (UniqueName: \"kubernetes.io/projected/1f197349-94d5-4ef3-962b-89045495d0c9-kube-api-access-hmzx5\") pod \"keystone-operator-controller-manager-6c9969c6c6-qsnkt\" (UID: \"1f197349-94d5-4ef3-962b-89045495d0c9\") " pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.970715 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwbq4\" (UniqueName: \"kubernetes.io/projected/a4711b00-4b88-47ef-9d5b-c01b57ac9b18-kube-api-access-mwbq4\") pod \"ironic-operator-controller-manager-59b5fc9845-fxw8p\" (UID: \"a4711b00-4b88-47ef-9d5b-c01b57ac9b18\") " pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.970805 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5fh4\" (UniqueName: \"kubernetes.io/projected/717d0fb4-cd0f-42b2-aca0-47e6166fe5d0-kube-api-access-j5fh4\") pod \"horizon-operator-controller-manager-586b66cf4f-t8jvq\" (UID: \"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0\") " pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:45:16 crc kubenswrapper[5081]: I1003 15:45:16.974807 5081 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.004864 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.006826 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.015316 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-7rt6x" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.032171 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.046101 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.043423 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmzx5\" (UniqueName: \"kubernetes.io/projected/1f197349-94d5-4ef3-962b-89045495d0c9-kube-api-access-hmzx5\") pod \"keystone-operator-controller-manager-6c9969c6c6-qsnkt\" (UID: \"1f197349-94d5-4ef3-962b-89045495d0c9\") " pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.033130 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwbq4\" (UniqueName: \"kubernetes.io/projected/a4711b00-4b88-47ef-9d5b-c01b57ac9b18-kube-api-access-mwbq4\") pod \"ironic-operator-controller-manager-59b5fc9845-fxw8p\" (UID: \"a4711b00-4b88-47ef-9d5b-c01b57ac9b18\") " pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.052419 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-tmvpl" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.075667 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb59j\" (UniqueName: \"kubernetes.io/projected/61233e66-00aa-4863-be3d-56231db9d643-kube-api-access-pb59j\") pod \"mariadb-operator-controller-manager-696ff4bcdd-92bxf\" (UID: \"61233e66-00aa-4863-be3d-56231db9d643\") " pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.080068 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9wk7\" (UniqueName: \"kubernetes.io/projected/85db4351-f8f6-436b-9cf1-eb28aa937b21-kube-api-access-b9wk7\") pod \"manila-operator-controller-manager-66fdd975d9-vjfhq\" (UID: \"85db4351-f8f6-436b-9cf1-eb28aa937b21\") " pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.086773 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbrpg\" (UniqueName: \"kubernetes.io/projected/a810b266-4fbd-4034-add2-362aa5496443-kube-api-access-jbrpg\") pod \"neutron-operator-controller-manager-549fb68678-xrlvp\" (UID: 
\"a810b266-4fbd-4034-add2-362aa5496443\") " pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.082212 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.097573 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.099101 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.113143 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.159798 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.186707 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9wk7\" (UniqueName: \"kubernetes.io/projected/85db4351-f8f6-436b-9cf1-eb28aa937b21-kube-api-access-b9wk7\") pod \"manila-operator-controller-manager-66fdd975d9-vjfhq\" (UID: \"85db4351-f8f6-436b-9cf1-eb28aa937b21\") " pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.203723 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.207277 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svlgx\" (UniqueName: \"kubernetes.io/projected/5d0a686a-cc92-40dc-a408-9b02863a2337-kube-api-access-svlgx\") pod \"nova-operator-controller-manager-5b45478b88-glxmc\" (UID: \"5d0a686a-cc92-40dc-a408-9b02863a2337\") " pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.207327 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfqdz\" (UniqueName: \"kubernetes.io/projected/109fc9da-53eb-440c-9e33-60388a4ec529-kube-api-access-gfqdz\") pod \"octavia-operator-controller-manager-b4444585c-qx7v7\" (UID: \"109fc9da-53eb-440c-9e33-60388a4ec529\") " pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.207369 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbrpg\" (UniqueName: \"kubernetes.io/projected/a810b266-4fbd-4034-add2-362aa5496443-kube-api-access-jbrpg\") pod \"neutron-operator-controller-manager-549fb68678-xrlvp\" (UID: \"a810b266-4fbd-4034-add2-362aa5496443\") " pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.207429 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb59j\" (UniqueName: \"kubernetes.io/projected/61233e66-00aa-4863-be3d-56231db9d643-kube-api-access-pb59j\") pod 
\"mariadb-operator-controller-manager-696ff4bcdd-92bxf\" (UID: \"61233e66-00aa-4863-be3d-56231db9d643\") " pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.222680 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.237942 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbrpg\" (UniqueName: \"kubernetes.io/projected/a810b266-4fbd-4034-add2-362aa5496443-kube-api-access-jbrpg\") pod \"neutron-operator-controller-manager-549fb68678-xrlvp\" (UID: \"a810b266-4fbd-4034-add2-362aa5496443\") " pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.238512 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.239652 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb59j\" (UniqueName: \"kubernetes.io/projected/61233e66-00aa-4863-be3d-56231db9d643-kube-api-access-pb59j\") pod \"mariadb-operator-controller-manager-696ff4bcdd-92bxf\" (UID: \"61233e66-00aa-4863-be3d-56231db9d643\") " pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.267003 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.268420 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.273802 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.274111 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-6h5rf" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.279269 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.280678 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.285142 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-9lnp8" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.300515 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.301026 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.302791 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.317656 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-npp6m" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.321051 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svlgx\" (UniqueName: \"kubernetes.io/projected/5d0a686a-cc92-40dc-a408-9b02863a2337-kube-api-access-svlgx\") pod \"nova-operator-controller-manager-5b45478b88-glxmc\" (UID: \"5d0a686a-cc92-40dc-a408-9b02863a2337\") " pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.321085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfqdz\" (UniqueName: \"kubernetes.io/projected/109fc9da-53eb-440c-9e33-60388a4ec529-kube-api-access-gfqdz\") pod \"octavia-operator-controller-manager-b4444585c-qx7v7\" (UID: \"109fc9da-53eb-440c-9e33-60388a4ec529\") " pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.324932 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.330837 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.344639 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.344727 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.345397 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.346453 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.354253 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-5m8vd" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.362133 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.373747 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svlgx\" (UniqueName: \"kubernetes.io/projected/5d0a686a-cc92-40dc-a408-9b02863a2337-kube-api-access-svlgx\") pod \"nova-operator-controller-manager-5b45478b88-glxmc\" (UID: \"5d0a686a-cc92-40dc-a408-9b02863a2337\") " pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.385456 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.387358 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.391975 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-mv6lr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.410393 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfqdz\" (UniqueName: \"kubernetes.io/projected/109fc9da-53eb-440c-9e33-60388a4ec529-kube-api-access-gfqdz\") pod \"octavia-operator-controller-manager-b4444585c-qx7v7\" (UID: \"109fc9da-53eb-440c-9e33-60388a4ec529\") " pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.459235 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.486146 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6vzp\" (UniqueName: \"kubernetes.io/projected/73d438ca-470e-400d-9314-6567907fa58e-kube-api-access-x6vzp\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.486205 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktp6f\" (UniqueName: \"kubernetes.io/projected/cef2a91a-fb0a-418a-bc2a-83e535750cbd-kube-api-access-ktp6f\") pod \"ovn-operator-controller-manager-855d7949fc-2w6tr\" (UID: \"cef2a91a-fb0a-418a-bc2a-83e535750cbd\") " pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.486294 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.486336 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkn7w\" (UniqueName: \"kubernetes.io/projected/423af876-fc14-4fba-8835-4127010e0888-kube-api-access-lkn7w\") pod \"placement-operator-controller-manager-ccbfcb8c-hw6vw\" (UID: \"423af876-fc14-4fba-8835-4127010e0888\") " pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.487005 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.487222 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnq8t\" (UniqueName: \"kubernetes.io/projected/09b461bb-473a-4d23-b18c-00d456eb8810-kube-api-access-wnq8t\") pod \"telemetry-operator-controller-manager-5ffb97cddf-wzqnn\" (UID: \"09b461bb-473a-4d23-b18c-00d456eb8810\") " pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.487273 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.487326 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c98cz\" (UniqueName: \"kubernetes.io/projected/770e893d-89aa-417c-9455-599c14023853-kube-api-access-c98cz\") pod \"swift-operator-controller-manager-76d5577b-4ngsx\" (UID: \"770e893d-89aa-417c-9455-599c14023853\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.494115 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.521163 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5140942d-8224-4889-b650-7ebcd0ce93a1-cert\") pod \"infra-operator-controller-manager-7c9978f67-sz2f7\" (UID: \"5140942d-8224-4889-b650-7ebcd0ce93a1\") " pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.542628 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.544110 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.550477 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-wp7b8" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.563943 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.573164 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.575487 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.582282 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-s7vhx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.589142 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktp6f\" (UniqueName: \"kubernetes.io/projected/cef2a91a-fb0a-418a-bc2a-83e535750cbd-kube-api-access-ktp6f\") pod \"ovn-operator-controller-manager-855d7949fc-2w6tr\" (UID: \"cef2a91a-fb0a-418a-bc2a-83e535750cbd\") " pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.589911 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkn7w\" (UniqueName: \"kubernetes.io/projected/423af876-fc14-4fba-8835-4127010e0888-kube-api-access-lkn7w\") pod \"placement-operator-controller-manager-ccbfcb8c-hw6vw\" (UID: \"423af876-fc14-4fba-8835-4127010e0888\") " pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590124 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7czg\" (UniqueName: \"kubernetes.io/projected/cc394603-b291-47e6-b048-1668f1857a84-kube-api-access-b7czg\") pod \"watcher-operator-controller-manager-5595cf6c95-2c4gm\" (UID: \"cc394603-b291-47e6-b048-1668f1857a84\") " pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590298 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnq8t\" (UniqueName: \"kubernetes.io/projected/09b461bb-473a-4d23-b18c-00d456eb8810-kube-api-access-wnq8t\") pod \"telemetry-operator-controller-manager-5ffb97cddf-wzqnn\" (UID: \"09b461bb-473a-4d23-b18c-00d456eb8810\") " pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590440 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590553 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h82rg\" (UniqueName: \"kubernetes.io/projected/a5c423f5-481d-4557-ac32-30285a8d7ed9-kube-api-access-h82rg\") pod \"test-operator-controller-manager-6bb6dcddc-j47qs\" (UID: \"a5c423f5-481d-4557-ac32-30285a8d7ed9\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590688 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c98cz\" (UniqueName: \"kubernetes.io/projected/770e893d-89aa-417c-9455-599c14023853-kube-api-access-c98cz\") pod \"swift-operator-controller-manager-76d5577b-4ngsx\" (UID: \"770e893d-89aa-417c-9455-599c14023853\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 
15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.590851 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6vzp\" (UniqueName: \"kubernetes.io/projected/73d438ca-470e-400d-9314-6567907fa58e-kube-api-access-x6vzp\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: E1003 15:45:17.590607 5081 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 15:45:17 crc kubenswrapper[5081]: E1003 15:45:17.591338 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert podName:73d438ca-470e-400d-9314-6567907fa58e nodeName:}" failed. No retries permitted until 2025-10-03 15:45:18.091311946 +0000 UTC m=+1037.056868559 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert") pod "openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" (UID: "73d438ca-470e-400d-9314-6567907fa58e") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.606170 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.618669 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.632797 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.647275 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktp6f\" (UniqueName: \"kubernetes.io/projected/cef2a91a-fb0a-418a-bc2a-83e535750cbd-kube-api-access-ktp6f\") pod \"ovn-operator-controller-manager-855d7949fc-2w6tr\" (UID: \"cef2a91a-fb0a-418a-bc2a-83e535750cbd\") " pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.651999 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkn7w\" (UniqueName: \"kubernetes.io/projected/423af876-fc14-4fba-8835-4127010e0888-kube-api-access-lkn7w\") pod \"placement-operator-controller-manager-ccbfcb8c-hw6vw\" (UID: \"423af876-fc14-4fba-8835-4127010e0888\") " pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.654922 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6vzp\" (UniqueName: \"kubernetes.io/projected/73d438ca-470e-400d-9314-6567907fa58e-kube-api-access-x6vzp\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.655083 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnq8t\" (UniqueName: \"kubernetes.io/projected/09b461bb-473a-4d23-b18c-00d456eb8810-kube-api-access-wnq8t\") pod \"telemetry-operator-controller-manager-5ffb97cddf-wzqnn\" (UID: \"09b461bb-473a-4d23-b18c-00d456eb8810\") " pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.682531 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c98cz\" (UniqueName: \"kubernetes.io/projected/770e893d-89aa-417c-9455-599c14023853-kube-api-access-c98cz\") pod \"swift-operator-controller-manager-76d5577b-4ngsx\" (UID: \"770e893d-89aa-417c-9455-599c14023853\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.692542 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h82rg\" (UniqueName: \"kubernetes.io/projected/a5c423f5-481d-4557-ac32-30285a8d7ed9-kube-api-access-h82rg\") pod \"test-operator-controller-manager-6bb6dcddc-j47qs\" (UID: \"a5c423f5-481d-4557-ac32-30285a8d7ed9\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.692698 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7czg\" (UniqueName: \"kubernetes.io/projected/cc394603-b291-47e6-b048-1668f1857a84-kube-api-access-b7czg\") pod \"watcher-operator-controller-manager-5595cf6c95-2c4gm\" (UID: \"cc394603-b291-47e6-b048-1668f1857a84\") " pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.711763 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.713244 5081 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.724030 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-5ssjl" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.724310 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.726130 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.729226 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.732974 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h82rg\" (UniqueName: \"kubernetes.io/projected/a5c423f5-481d-4557-ac32-30285a8d7ed9-kube-api-access-h82rg\") pod \"test-operator-controller-manager-6bb6dcddc-j47qs\" (UID: \"a5c423f5-481d-4557-ac32-30285a8d7ed9\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.740450 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.741535 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.745894 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7czg\" (UniqueName: \"kubernetes.io/projected/cc394603-b291-47e6-b048-1668f1857a84-kube-api-access-b7czg\") pod \"watcher-operator-controller-manager-5595cf6c95-2c4gm\" (UID: \"cc394603-b291-47e6-b048-1668f1857a84\") " pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.748676 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-5lz74" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.770666 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.798336 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.826638 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79"] Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.874259 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.885065 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.895405 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gmt5\" (UniqueName: \"kubernetes.io/projected/3a7aaeef-f949-461a-be01-805e945451d3-kube-api-access-9gmt5\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tptrj\" (UID: \"3a7aaeef-f949-461a-be01-805e945451d3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.895879 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m84g8\" (UniqueName: \"kubernetes.io/projected/83299e0a-8094-4228-86f2-e0b290cd3571-kube-api-access-m84g8\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.895987 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.932744 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.968775 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:45:17 crc kubenswrapper[5081]: I1003 15:45:17.973299 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x"] Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:17.998937 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gmt5\" (UniqueName: \"kubernetes.io/projected/3a7aaeef-f949-461a-be01-805e945451d3-kube-api-access-9gmt5\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tptrj\" (UID: \"3a7aaeef-f949-461a-be01-805e945451d3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:17.998995 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m84g8\" (UniqueName: \"kubernetes.io/projected/83299e0a-8094-4228-86f2-e0b290cd3571-kube-api-access-m84g8\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:17.999065 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:18 crc kubenswrapper[5081]: E1003 15:45:17.999241 5081 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 03 15:45:18 crc kubenswrapper[5081]: E1003 15:45:17.999296 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert podName:83299e0a-8094-4228-86f2-e0b290cd3571 nodeName:}" failed. No retries permitted until 2025-10-03 15:45:18.499278151 +0000 UTC m=+1037.464834764 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert") pod "openstack-operator-controller-manager-54df7874c5-tnb4t" (UID: "83299e0a-8094-4228-86f2-e0b290cd3571") : secret "webhook-server-cert" not found Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.031103 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m84g8\" (UniqueName: \"kubernetes.io/projected/83299e0a-8094-4228-86f2-e0b290cd3571-kube-api-access-m84g8\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.044278 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gmt5\" (UniqueName: \"kubernetes.io/projected/3a7aaeef-f949-461a-be01-805e945451d3-kube-api-access-9gmt5\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tptrj\" (UID: \"3a7aaeef-f949-461a-be01-805e945451d3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" Oct 03 15:45:18 crc kubenswrapper[5081]: W1003 15:45:18.048283 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa557e19_6921_4cb5_88b0_10ee3093201c.slice/crio-14d57848d35b07243aaf7e95c91a731ee056907de19b310a28f136ccfd1a8086 WatchSource:0}: Error finding container 14d57848d35b07243aaf7e95c91a731ee056907de19b310a28f136ccfd1a8086: Status 404 returned error can't find the container with id 14d57848d35b07243aaf7e95c91a731ee056907de19b310a28f136ccfd1a8086 Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.077208 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.102315 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.105920 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45"] Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.110949 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/73d438ca-470e-400d-9314-6567907fa58e-cert\") pod \"openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw\" (UID: \"73d438ca-470e-400d-9314-6567907fa58e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.130744 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx"] Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.280966 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.334446 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p"] Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.491256 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt"] Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.507744 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" event={"ID":"4bea9891-fd7e-44ed-9af7-868cb55a9a59","Type":"ContainerStarted","Data":"2019c66b6d0418e25496447193caa570d69b75eb576dcc61f2eb0c1f2d997fc4"} Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.509036 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.510137 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" event={"ID":"7d518e4e-beff-4962-83a3-e4147b2cefed","Type":"ContainerStarted","Data":"489e24f4244eccd239b00e374403e797d574a22b01bf6d59abcc09c23cb28cf0"} Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.511571 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" event={"ID":"a4711b00-4b88-47ef-9d5b-c01b57ac9b18","Type":"ContainerStarted","Data":"7deca5d78e93a0d7e8bf543f1f84c015397b1ff091d3d5c028d5b9d9342dde56"} Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.513418 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" event={"ID":"e762bb01-e884-43df-afe3-2c4bc45136a8","Type":"ContainerStarted","Data":"ace7165194421285f1e3f9a1634c40f762c5efbf9c8af37481a9ef3966532dca"} Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.514741 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" event={"ID":"fa557e19-6921-4cb5-88b0-10ee3093201c","Type":"ContainerStarted","Data":"14d57848d35b07243aaf7e95c91a731ee056907de19b310a28f136ccfd1a8086"} Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.519161 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/83299e0a-8094-4228-86f2-e0b290cd3571-cert\") pod \"openstack-operator-controller-manager-54df7874c5-tnb4t\" (UID: \"83299e0a-8094-4228-86f2-e0b290cd3571\") " pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:18 crc kubenswrapper[5081]: W1003 15:45:18.535715 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f197349_94d5_4ef3_962b_89045495d0c9.slice/crio-6359f275a0d2616cf9a02ed614f1c651c4585e3f902cef3e9651151174506c4e WatchSource:0}: Error finding container 6359f275a0d2616cf9a02ed614f1c651c4585e3f902cef3e9651151174506c4e: Status 404 returned error can't find 
the container with id 6359f275a0d2616cf9a02ed614f1c651c4585e3f902cef3e9651151174506c4e Oct 03 15:45:18 crc kubenswrapper[5081]: I1003 15:45:18.644019 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.000800 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.013259 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.018711 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.028691 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.044794 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.064639 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.064710 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.075258 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf"] Oct 03 15:45:19 crc kubenswrapper[5081]: W1003 15:45:19.094640 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode305740c_d1a1_4150_ab8f_0742d6a50db3.slice/crio-0263c5e4eb8dba12583b24442dc901990727020aa82734f3ebd3b1f7153d4b35 WatchSource:0}: Error finding container 0263c5e4eb8dba12583b24442dc901990727020aa82734f3ebd3b1f7153d4b35: Status 404 returned error can't find the container with id 0263c5e4eb8dba12583b24442dc901990727020aa82734f3ebd3b1f7153d4b35 Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.115520 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.127474 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.145479 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.154346 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx"] Oct 03 15:45:19 crc kubenswrapper[5081]: W1003 15:45:19.161877 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5c423f5_481d_4557_ac32_30285a8d7ed9.slice/crio-6374a2c9c9bcf340f5d5a4af1c21623082217b982b14dfa2e69dd9b4d24e4be0 WatchSource:0}: Error finding container 
6374a2c9c9bcf340f5d5a4af1c21623082217b982b14dfa2e69dd9b4d24e4be0: Status 404 returned error can't find the container with id 6374a2c9c9bcf340f5d5a4af1c21623082217b982b14dfa2e69dd9b4d24e4be0 Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.164234 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj"] Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.174980 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr"] Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.196007 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c98cz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-76d5577b-4ngsx_openstack-operators(770e893d-89aa-417c-9455-599c14023853): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.202779 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw"] Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.203606 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b7czg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5595cf6c95-2c4gm_openstack-operators(cc394603-b291-47e6-b048-1668f1857a84): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 15:45:19 crc kubenswrapper[5081]: W1003 15:45:19.206626 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcef2a91a_fb0a_418a_bc2a_83e535750cbd.slice/crio-3d0d4a9cfb423bfd5fdc0f8d7ef2d7ae3d4a74e0edc9d5f6f4153c8472b9501d WatchSource:0}: Error finding container 3d0d4a9cfb423bfd5fdc0f8d7ef2d7ae3d4a74e0edc9d5f6f4153c8472b9501d: Status 404 returned error can't find the container with id 3d0d4a9cfb423bfd5fdc0f8d7ef2d7ae3d4a74e0edc9d5f6f4153c8472b9501d Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.219144 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9gmt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-tptrj_openstack-operators(3a7aaeef-f949-461a-be01-805e945451d3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.220436 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" podUID="3a7aaeef-f949-461a-be01-805e945451d3" Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.220348 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent@sha256:d1fad97d2cd602a4f7b6fd6c202464ac117b20e6608c17aa04cadbceb78a498d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:1c99923410d4cd0a721d2cc8a51d91d3ac800d5fda508c972ebe1e85ed2ca4d0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api@sha256:af4e2467469edf3b1fa739ef819ead98dfa934542ae40ec3266d58f66ba44f99,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator@sha256:99f246f3b9bad7c46b671da12cd166614f0573b3dbf0aa04f4b32d4a9f5a81c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener@sha256:d617f09ab1f6ef522c6f70db597cf20ab79ccebf25e225653cbf2e999354a5c0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier@sha256:1c73b7b1034524ecfb36ce1eaa37ecbbcd5cb3f7fee0149b3bce0b0170bae8ce,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:9e14abeaab473b6731830d9c5bf383bb52111c919c787aee06b833f8cd3f83b1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener@sha256:0838a5c5edf54c1c8af59c93955f26e4eda6645297058780e0f61c77b65683d9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker@sha256:c50baa554100db160210b65733f71d6d128e38f96fa0552819854c62ede75953,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:e43273f867316a0e03469d82dc37487d3cdd2b08b4a153ba270c7cae1749bf92,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute@sha256:de50c7dd282aa3898f1d0a31ecb2a300688f1f234662e6bbe12f35f88b484083,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi@sha256:31c0d98fec7ff16416903874af0addeff03a7e72ede256990f2a71589e8be5ce,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter@sha256:7211a617ec657701ca819aa0ba28e1d5750f5bf2c1391b755cc4a48cc360b0fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification@sha256:ac586b71d28a6240b29f4b464b19fea812ffc81e1182d172570b4be5ac58ea70,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core@sha2
56:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:f4b02f57187855a6adb5b32d9a8ed92dea2376471c6e33783b4c45f4b56b0166,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup@sha256:a5df039c808a65a273073128a627d6700897d6ebf81a9c62412c7d06be3b9a6e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler@sha256:8f09cdc578caa07e0b5a9ec4e96a251a6d7dd43b2ef1edacb56543c997c259e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume@sha256:e870d0a1b0c758601a067bfccc539ca04222e0c867872f679cea5833e0fcbf94,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api@sha256:8f112731484f983f272f4c95558ffa098e96e610ddc5130ee0f2b2a239e9058a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9@sha256:277ac4620d95ce3fe2f552f59b82b70962ba024d498710adc45b863bcc7244ff,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central@sha256:09eebb1f87217fbb0249f4ebc19192cd282833aac27103081160b8949dd4361c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns@sha256:e17eb4221e8981df97744e5168a8c759abcd925c2a483d04e3fdecd78128dae4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer@sha256:02f99d84c8cc2c59ac4b8d98f219a1138b0aed8e50f91f9326ef55db5c187cd8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound@sha256:7a636c7f518127d4292aa5417113fd611b85ad49ddbc8273455aa2fe5066a533,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker@sha256:61f617fd809b55b2eceeec84b3283757af80d1001659e80877ac69e9643ba89f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr@sha256:0b083fceb6e323a30f4c7308a275ea88243420ef38df77ac322af302c4c4dd2d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid@sha256:9e173574f9216e5c42498c3794075ead54b6850c66094c4be628b52063f5814c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler@sha256:581b65b646301e0fcb07582150ba63438f1353a85bf9acf1eb2acb4ce71c58bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron@sha256:980d0d43a83e61b74634b46864c2070fcb26348f8bc5a3375f161703e4041d3d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd@sha256:d561737cf54869c67a819635c4a10ca4a9ed21cc6046ffd4f17301670d9a25fd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/opensta
ck-neutron-dhcp-agent@sha256:941076bbb1577abd91f42e0f19b0a191f7e393135d823ed203b122875033888b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn@sha256:2133db6669a24570a266e7c053fc71bbfadd16cd9cd0bc8b87633e73c03c4719,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent@sha256:55682010f0f5aea02f59df1e0a827cc6915048b7545c25432fb0cb8501898d0b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent@sha256:814536e8e4848f6612cd4ada641d46ae7d766878b89918fc5df11f3930747d3a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter@sha256:39c642b2b337e38c18e80266fb14383754178202f40103646337722a594d984c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent@sha256:a4f12a27e60f17034ba47f57dba0c5ae3f9e3c6c681f2e417bb87cb132f502e7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter@sha256:d339ba049bbd1adccb795962bf163f5b22fd84dea865d88b9eb525e46247d6bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:2069e730d5ced0e278392077ad261a3c35bf5df1d88735441859f23e8e3ceb24,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api@sha256:17b8c6c9fbcc7092cba64a264adb9af6decd7db24ee2c60607a9045d55031b51,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn@sha256:f0864392605772b30f07dcb67ec8bb75d5b779756c537983377044d899c1b099,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine@sha256:d9a44db937205e4c4f2cd2d247d230de2eb9207089f35a7ae7cfb11301406fac,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon@sha256:1ab1deb86e7e5ba67b4cd9f5974de6707e5a5948e8f01fc1156dbf5e452340a3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached@sha256:a895c2b3a12aa21f9541a76213b6058ce3252aca002d66025d5935f4ea5873c7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis@sha256:e7c778fd348f881fea490bb9ddf465347068a60fcd65f9cbfedb615815bba2a2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api@sha256:a21c91d6927d863be8aef3023a527bc3466a0ddffc018df0c970ce14396ceee0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor@sha256:7053c79b8354195fd09a5ea1347ad49a35443923d4e4578f80615c63d83313d3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector@sha256:4626ebaa9dbe27fc95b31a48e69397fadef7c9779670c01555f872873c393f74,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-ante
lope-centos9/openstack-ironic-neutron-agent@sha256:c840d7e9775d7f7ed1c6700d973bef79318fe92ac6fc8ed0616dcec13ef95c92,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe@sha256:fcb50aade382ff516554b84b45c742a5adafb460fd67bd0fa2fc7cbb30adf5c1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent@sha256:54373b05fcd33538d153507943da0c118e303a5c61a19c6bbe79a0786fe8ce1d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone@sha256:8c9be58185245280d7282e8973cc6e23e6b08520ce126aeb91cfbcef0c144690,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api@sha256:676ba6130835d00defc3214769d5fe1827ee41420a05f8556f361aac502a7efc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler@sha256:3dbd2ac58b5f64ab3cf3eef3c44a52f0ccd363568c0739a5d18d6b9c9edddf5d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share@sha256:fded6f454a54e601894e06989243e8896f43940c77cd8f4c904fe43c120b1595,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5d10c016b13499110b5f9ca2bccfaf6d2fd4298c9f02580d7208fe91850da0a6,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils@sha256:43f2c4ec2e38934288015cb5d5ae92941e8b3fa9a613539175641e2c16cfc0cc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api@sha256:c506a314e354f1ab274c46f9969b254f820e7515bbd9a24c9877dfbb10ece37e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute@sha256:96d4b699758dd3d408b4c672dbe4392fd09783b4dc60783389905d7220b6524c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor@sha256:7a0f3de7dda85fba7ad2929c7b01a2d42c11df9fe83f47a8e499a9da51e7f48c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy@sha256:49b5ae7f895266b90cf3c02503fb7146726e59ad782fdf88112ad6954112d7e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler@sha256:19b3d48b3c29eaa3a6d76fc145e212389f245c077bbf24eb5c1de0c96f3f7190,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api@sha256:227891a9f4821a92c49ddc27301303287d5632b6ac199e9fe402581f1831ec01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-
health-manager@sha256:26e3ada4b9fee357ef8bbb1c342b38c49c096ede8a498116e3753ad45354fb47,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping@sha256:2789b45ae2a5a9a80e4864e691f9e32fb9c9e1938cf92bda7c07defdbc78cdc2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog@sha256:8df8259e737625667b13897dc0094bf3d7ced54f414dda93293ad4cb68af1d43,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker@sha256:2fe4f8c71e11a926450d6553e5cb5c7b2db5d0de8426aa969f30d3d566114ff8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:ab5265aef98352336f23b18080f3ba110250859dc0edc20819348311a4a53044,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather@sha256:943eee724277e252795909137538a553ef5284c8103ad01b9be7b0138c66d14d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter@sha256:ecd56e6733c475f2d441344fd98f288c3eac0261ba113695fec7520a954ccbc7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi@sha256:a3c1b94a285064d150145340c06ad5b0afc4aa20caa74523f3972c19b1d1ea61,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:129e24971fee94cc60b5f440605f1512fb932a884e38e64122f38f11f942e3b9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:b96baffbb926f93936bd52f2a1ef4fe1d31bb469d6489e9fb67bf00b99156551,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:d659d1ffbbaff7c76fc96e6600dc9b03c53af2c9d63cfb4626dfb5831b7b1ad7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:e3fcd72e1a2790ca7db5d5c40c1ae597de4b020dd51debcab063352e6e5f7d79,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server@sha256:2504c0db038b850cdd6057fc50e109715a4453c386e4f4d4f901a20dc7b2036a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:35c124624fd84930496975032e22d57e517c5958e71ba63124a306a5949c71d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e3accbf4293c544194bd2151d4d0bd8b26828ddacda968bad5d5a6f05c2406db,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account@sha256:75ce8c4f9c68aaba6cab59749e726b2f94d29ba7b7897b18112fe1bd350efd8f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container@sha256:6390af78808d7cd69a4f5c7cb88f47690e54c9b8838b9461f4b21c4127ce770c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/
podified-antelope-centos9/openstack-swift-object@sha256:14489a8a681c482a643cb47fa90d0a3596b4570e13cfc760541ac80d37cd31b3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server@sha256:87367a67c7cb73476fb8d08ba108da843ac61170381458608e778a33c024c0c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all@sha256:d6123a9349d422888df97ee72d32643dd534f81c521f6f313c5d5e64e2db60c1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api@sha256:b273fd1e1da4190dc4cc67469d180b66b5a22eb6ec9afc76ef36dd6ea2beaea5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier@sha256:9561306ec9455914cd05a0a0b3e56d72c7164aa41d0f0ef9b03ac7d7343538b8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine@sha256:1115e5a2dce397b4a34a082cba1937903818ab5928048fcf775c4a4e6dda2d07,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6vzp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw_openstack-operators(73d438ca-470e-400d-9314-6567907fa58e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.221839 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94,Command:[/manager],Args:[--health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ktp6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-855d7949fc-2w6tr_openstack-operators(cef2a91a-fb0a-418a-bc2a-83e535750cbd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.307280 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw"] Oct 03 15:45:19 crc kubenswrapper[5081]: W1003 15:45:19.351598 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod423af876_fc14_4fba_8835_4127010e0888.slice/crio-88646968ab108e10011aa1b8b08970f1ac8dadcbb0835f16df81b73484c352d3 WatchSource:0}: Error finding container 88646968ab108e10011aa1b8b08970f1ac8dadcbb0835f16df81b73484c352d3: Status 404 returned error can't find the container with id 88646968ab108e10011aa1b8b08970f1ac8dadcbb0835f16df81b73484c352d3 Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.371388 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t"] Oct 03 15:45:19 crc kubenswrapper[5081]: W1003 15:45:19.408755 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83299e0a_8094_4228_86f2_e0b290cd3571.slice/crio-878092a08af42392484398e4f5271643e289281c6deadd20656ab990eb647437 WatchSource:0}: Error finding container 878092a08af42392484398e4f5271643e289281c6deadd20656ab990eb647437: Status 404 returned error can't find the container 
with id 878092a08af42392484398e4f5271643e289281c6deadd20656ab990eb647437
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.524678 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" event={"ID":"e305740c-d1a1-4150-ab8f-0742d6a50db3","Type":"ContainerStarted","Data":"0263c5e4eb8dba12583b24442dc901990727020aa82734f3ebd3b1f7153d4b35"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.526217 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" event={"ID":"5140942d-8224-4889-b650-7ebcd0ce93a1","Type":"ContainerStarted","Data":"550bc3517b2abb64012c5bc420838c4ee651d10d57b0cb525257ab4659f10e60"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.528446 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" event={"ID":"73d438ca-470e-400d-9314-6567907fa58e","Type":"ContainerStarted","Data":"d6b4f08e4ac5129b93de214b29004cd1f4a53683b833814bb214ac6102c87c2d"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.531192 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" event={"ID":"85db4351-f8f6-436b-9cf1-eb28aa937b21","Type":"ContainerStarted","Data":"7dc91e35d0b783fcb7c25e02f95340489b5e837cb4cae54f72785dacc7cf4362"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.534392 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" event={"ID":"770e893d-89aa-417c-9455-599c14023853","Type":"ContainerStarted","Data":"f469445ce4ec7d62956ebc808eb666b312551675d4377c2f3639217401c590a0"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.544455 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" event={"ID":"a810b266-4fbd-4034-add2-362aa5496443","Type":"ContainerStarted","Data":"f77186d4c54db752c1c0d4b52240e061d95b21ff4fc4d7ac2f94fe0db8a7ba0f"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.557238 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" event={"ID":"09b461bb-473a-4d23-b18c-00d456eb8810","Type":"ContainerStarted","Data":"0b8c1510a230650c52670f4494bc2fcc7e05abf126fb3742249ee4b42c6d5c0e"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.560296 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" event={"ID":"109fc9da-53eb-440c-9e33-60388a4ec529","Type":"ContainerStarted","Data":"aa8c98ad40d8459c2abc39952aec16e9f80005e92d10eec56b7023a7a6f43c12"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.563250 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" event={"ID":"5d0a686a-cc92-40dc-a408-9b02863a2337","Type":"ContainerStarted","Data":"fe997a68a48074c5e841b1e655852ec95e1f0333e99609dc314e2028a1251a71"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.569592 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" event={"ID":"cc394603-b291-47e6-b048-1668f1857a84","Type":"ContainerStarted","Data":"3e6d23032fa1048ad10eb04d1b8824d957858f6d046fa18773f12cbe81da5c99"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.571702 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" event={"ID":"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0","Type":"ContainerStarted","Data":"62f6ebad9eb52f2e753128a14875c9465f590923f26a7e20c991129416e20813"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.574485 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" event={"ID":"61233e66-00aa-4863-be3d-56231db9d643","Type":"ContainerStarted","Data":"cbc07eca28834a2e08cdeb87b6376f49af147285f93ebcc534f7b5a0ec7db2e2"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.576145 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" event={"ID":"a5c423f5-481d-4557-ac32-30285a8d7ed9","Type":"ContainerStarted","Data":"6374a2c9c9bcf340f5d5a4af1c21623082217b982b14dfa2e69dd9b4d24e4be0"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.577104 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" event={"ID":"83299e0a-8094-4228-86f2-e0b290cd3571","Type":"ContainerStarted","Data":"878092a08af42392484398e4f5271643e289281c6deadd20656ab990eb647437"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.578359 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" event={"ID":"3a7aaeef-f949-461a-be01-805e945451d3","Type":"ContainerStarted","Data":"114ed175534034e1aab923b9c83744cb9bf8e992432552d07921b72996dcfe09"}
Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.580377 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" podUID="3a7aaeef-f949-461a-be01-805e945451d3"
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.585756 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" event={"ID":"1f197349-94d5-4ef3-962b-89045495d0c9","Type":"ContainerStarted","Data":"6359f275a0d2616cf9a02ed614f1c651c4585e3f902cef3e9651151174506c4e"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.590909 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" event={"ID":"cef2a91a-fb0a-418a-bc2a-83e535750cbd","Type":"ContainerStarted","Data":"3d0d4a9cfb423bfd5fdc0f8d7ef2d7ae3d4a74e0edc9d5f6f4153c8472b9501d"}
Oct 03 15:45:19 crc kubenswrapper[5081]: I1003 15:45:19.593240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" event={"ID":"423af876-fc14-4fba-8835-4127010e0888","Type":"ContainerStarted","Data":"88646968ab108e10011aa1b8b08970f1ac8dadcbb0835f16df81b73484c352d3"}
Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.648774 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" podUID="770e893d-89aa-417c-9455-599c14023853"
Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.679256 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podUID="73d438ca-470e-400d-9314-6567907fa58e"
Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.681006 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" podUID="cc394603-b291-47e6-b048-1668f1857a84"
Oct 03 15:45:19 crc kubenswrapper[5081]: E1003 15:45:19.689032 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" podUID="cef2a91a-fb0a-418a-bc2a-83e535750cbd"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.628169 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" event={"ID":"73d438ca-470e-400d-9314-6567907fa58e","Type":"ContainerStarted","Data":"19982b43242d060eb6361a7b47252153cbcb337cbb44e70d1c8e32fceff521da"}
Oct 03 15:45:20 crc kubenswrapper[5081]: E1003 15:45:20.630886 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podUID="73d438ca-470e-400d-9314-6567907fa58e"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.631865 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" event={"ID":"cc394603-b291-47e6-b048-1668f1857a84","Type":"ContainerStarted","Data":"85d555bda8e181cecbc2e9ad96ef7f8907a9ec1ad467a866c7a9207c1588497d"}
Oct 03 15:45:20 crc kubenswrapper[5081]: E1003 15:45:20.637994 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" podUID="cc394603-b291-47e6-b048-1668f1857a84"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.655329 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" event={"ID":"770e893d-89aa-417c-9455-599c14023853","Type":"ContainerStarted","Data":"1867610cb0997e2828a2894cd6281b93fcb20df683172becadc3522071220116"}
Oct 03 15:45:20 crc kubenswrapper[5081]: E1003 15:45:20.658281 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" podUID="770e893d-89aa-417c-9455-599c14023853"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.661846 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" event={"ID":"83299e0a-8094-4228-86f2-e0b290cd3571","Type":"ContainerStarted","Data":"25c563c496c17b1e9a5b531ebb5476983581ead5ee4bb6873b693a84120d28d6"}
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.662116 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" event={"ID":"83299e0a-8094-4228-86f2-e0b290cd3571","Type":"ContainerStarted","Data":"f1137dc5066229b35538a3908ecff4ae91bf413a5250e52a133d763e8e70492d"}
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.662903 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.665487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" event={"ID":"cef2a91a-fb0a-418a-bc2a-83e535750cbd","Type":"ContainerStarted","Data":"aa6b852c7e012684fc5a86ea858014b554004ad489de4707a9d0f7bf0bd536c6"}
Oct 03 15:45:20 crc kubenswrapper[5081]: E1003 15:45:20.687821 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" podUID="cef2a91a-fb0a-418a-bc2a-83e535750cbd"
Oct 03 15:45:20 crc kubenswrapper[5081]: E1003 15:45:20.687889 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" podUID="3a7aaeef-f949-461a-be01-805e945451d3"
Oct 03 15:45:20 crc kubenswrapper[5081]: I1003 15:45:20.781685 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" podStartSLOduration=3.7816608 podStartE2EDuration="3.7816608s" podCreationTimestamp="2025-10-03 15:45:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:45:20.761678873 +0000 UTC m=+1039.727235496" watchObservedRunningTime="2025-10-03 15:45:20.7816608 +0000 UTC m=+1039.747217413"
Oct 03 15:45:21 crc kubenswrapper[5081]: E1003 15:45:21.704528 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" podUID="770e893d-89aa-417c-9455-599c14023853"
Oct 03 15:45:21 crc
kubenswrapper[5081]: E1003 15:45:21.704942 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" podUID="cc394603-b291-47e6-b048-1668f1857a84" Oct 03 15:45:21 crc kubenswrapper[5081]: E1003 15:45:21.705030 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" podUID="cef2a91a-fb0a-418a-bc2a-83e535750cbd" Oct 03 15:45:21 crc kubenswrapper[5081]: E1003 15:45:21.708339 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podUID="73d438ca-470e-400d-9314-6567907fa58e" Oct 03 15:45:28 crc kubenswrapper[5081]: I1003 15:45:28.650926 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54df7874c5-tnb4t" Oct 03 15:45:31 crc kubenswrapper[5081]: E1003 15:45:31.883731 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b" Oct 03 15:45:31 crc kubenswrapper[5081]: E1003 15:45:31.884740 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:585796b996a5b6d7ad68f0cb420bf4f2ee38c9f16f194e3111c162ce91ea8a7b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kps75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-58d86cd59d-8pt7x_openstack-operators(fa557e19-6921-4cb5-88b0-10ee3093201c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 15:45:35 crc kubenswrapper[5081]: E1003 15:45:35.412862 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa" Oct 03 15:45:35 crc kubenswrapper[5081]: E1003 15:45:35.413612 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wnq8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5ffb97cddf-wzqnn_openstack-operators(09b461bb-473a-4d23-b18c-00d456eb8810): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:45:35 crc kubenswrapper[5081]: I1003 15:45:35.901937 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 15:45:39 crc kubenswrapper[5081]: I1003 15:45:39.864069 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" event={"ID":"4bea9891-fd7e-44ed-9af7-868cb55a9a59","Type":"ContainerStarted","Data":"53fcd7c0e6019c112bad1f9bc21211d925b8f457cd1fb47e7340ce11c6db3f1d"}
Oct 03 15:45:42 crc kubenswrapper[5081]: E1003 15:45:42.322930 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" podUID="fa557e19-6921-4cb5-88b0-10ee3093201c"
Oct 03 15:45:42 crc kubenswrapper[5081]: I1003 15:45:42.888422 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" event={"ID":"fa557e19-6921-4cb5-88b0-10ee3093201c","Type":"ContainerStarted","Data":"16da427b4d194b1f8f51cd9b4323fbb99d0bfca9139efc04377c29d42673f9d3"}
Oct 03 15:46:01 crc kubenswrapper[5081]: I1003 15:46:01.043967 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" event={"ID":"7d518e4e-beff-4962-83a3-e4147b2cefed","Type":"ContainerStarted","Data":"a38dfd7e7053b495688a32f386f312a0eb71bc40844f8b47b7dd10e01e9a0ca0"}
Oct 03 15:46:02 crc kubenswrapper[5081]: E1003 15:46:02.821895 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee"
Oct 03 15:46:02 crc kubenswrapper[5081]: E1003 15:46:02.823158 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent@sha256:d1fad97d2cd602a4f7b6fd6c202464ac117b20e6608c17aa04cadbceb78a498d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:1c99923410d4cd0a721d2cc8a51d91d3ac800d5fda508c972ebe1e85ed2ca4d0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api@sha256:af4e2467469edf3b1fa739ef819ead98dfa934542ae40ec3266d58f66ba44f99,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator@sha256:99f246f3b9bad7c46b671da12cd166614f0573b3dbf0aa04f4b32d4a9f5a81c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener@sha256:d617f09ab1f6ef522c6f70db597cf20ab79ccebf25e225653cbf2e999354a5c0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier@sha256:1c73b7b1034524ecfb36ce1eaa37ecbbcd5cb3f7fee0149b3bce0b0170bae8ce,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:9e14abeaab473b6731830d9c5bf383bb52111c919c787aee06b833f8cd3f83b1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener@sha256:0838a5c5edf54c1c8af59c93955f26e4eda6645297058780e0f61c77b65683d9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker@sha256:c50baa554100db160210b65733f71d6d128e38f96fa0552819854c62ede75953,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:e43273f867316a0e03469d82dc37487d3cdd2b08b4a153ba270c7cae1749bf92,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute@sha256:de50c7dd282aa3898f1d0a31ecb2a300688f1f234662e6bbe12f35f88b484083,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi@sha256:31c0d98fec7ff16416903874af0addeff03a7e72ede256990f2a71589e8be5ce,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter@sha256:7211a617ec657701ca819aa0ba28e1d5750f5bf2c1391b755cc4a48cc360b0fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification@sha256:ac586b71d28a6240b29f4b464b19fea812ffc81e1182d172570b4be5ac58ea70,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core@sha2
56:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:f4b02f57187855a6adb5b32d9a8ed92dea2376471c6e33783b4c45f4b56b0166,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup@sha256:a5df039c808a65a273073128a627d6700897d6ebf81a9c62412c7d06be3b9a6e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler@sha256:8f09cdc578caa07e0b5a9ec4e96a251a6d7dd43b2ef1edacb56543c997c259e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume@sha256:e870d0a1b0c758601a067bfccc539ca04222e0c867872f679cea5833e0fcbf94,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api@sha256:8f112731484f983f272f4c95558ffa098e96e610ddc5130ee0f2b2a239e9058a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9@sha256:277ac4620d95ce3fe2f552f59b82b70962ba024d498710adc45b863bcc7244ff,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central@sha256:09eebb1f87217fbb0249f4ebc19192cd282833aac27103081160b8949dd4361c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns@sha256:e17eb4221e8981df97744e5168a8c759abcd925c2a483d04e3fdecd78128dae4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer@sha256:02f99d84c8cc2c59ac4b8d98f219a1138b0aed8e50f91f9326ef55db5c187cd8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound@sha256:7a636c7f518127d4292aa5417113fd611b85ad49ddbc8273455aa2fe5066a533,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker@sha256:61f617fd809b55b2eceeec84b3283757af80d1001659e80877ac69e9643ba89f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr@sha256:0b083fceb6e323a30f4c7308a275ea88243420ef38df77ac322af302c4c4dd2d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid@sha256:9e173574f9216e5c42498c3794075ead54b6850c66094c4be628b52063f5814c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler@sha256:581b65b646301e0fcb07582150ba63438f1353a85bf9acf1eb2acb4ce71c58bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron@sha256:980d0d43a83e61b74634b46864c2070fcb26348f8bc5a3375f161703e4041d3d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd@sha256:d561737cf54869c67a819635c4a10ca4a9ed21cc6046ffd4f17301670d9a25fd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/opensta
ck-neutron-dhcp-agent@sha256:941076bbb1577abd91f42e0f19b0a191f7e393135d823ed203b122875033888b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn@sha256:2133db6669a24570a266e7c053fc71bbfadd16cd9cd0bc8b87633e73c03c4719,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent@sha256:55682010f0f5aea02f59df1e0a827cc6915048b7545c25432fb0cb8501898d0b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent@sha256:814536e8e4848f6612cd4ada641d46ae7d766878b89918fc5df11f3930747d3a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter@sha256:39c642b2b337e38c18e80266fb14383754178202f40103646337722a594d984c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent@sha256:a4f12a27e60f17034ba47f57dba0c5ae3f9e3c6c681f2e417bb87cb132f502e7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter@sha256:d339ba049bbd1adccb795962bf163f5b22fd84dea865d88b9eb525e46247d6bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:2069e730d5ced0e278392077ad261a3c35bf5df1d88735441859f23e8e3ceb24,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api@sha256:17b8c6c9fbcc7092cba64a264adb9af6decd7db24ee2c60607a9045d55031b51,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn@sha256:f0864392605772b30f07dcb67ec8bb75d5b779756c537983377044d899c1b099,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine@sha256:d9a44db937205e4c4f2cd2d247d230de2eb9207089f35a7ae7cfb11301406fac,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon@sha256:1ab1deb86e7e5ba67b4cd9f5974de6707e5a5948e8f01fc1156dbf5e452340a3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached@sha256:a895c2b3a12aa21f9541a76213b6058ce3252aca002d66025d5935f4ea5873c7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis@sha256:e7c778fd348f881fea490bb9ddf465347068a60fcd65f9cbfedb615815bba2a2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api@sha256:a21c91d6927d863be8aef3023a527bc3466a0ddffc018df0c970ce14396ceee0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor@sha256:7053c79b8354195fd09a5ea1347ad49a35443923d4e4578f80615c63d83313d3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector@sha256:4626ebaa9dbe27fc95b31a48e69397fadef7c9779670c01555f872873c393f74,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-ante
lope-centos9/openstack-ironic-neutron-agent@sha256:c840d7e9775d7f7ed1c6700d973bef79318fe92ac6fc8ed0616dcec13ef95c92,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe@sha256:fcb50aade382ff516554b84b45c742a5adafb460fd67bd0fa2fc7cbb30adf5c1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent@sha256:54373b05fcd33538d153507943da0c118e303a5c61a19c6bbe79a0786fe8ce1d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone@sha256:8c9be58185245280d7282e8973cc6e23e6b08520ce126aeb91cfbcef0c144690,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api@sha256:676ba6130835d00defc3214769d5fe1827ee41420a05f8556f361aac502a7efc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler@sha256:3dbd2ac58b5f64ab3cf3eef3c44a52f0ccd363568c0739a5d18d6b9c9edddf5d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share@sha256:fded6f454a54e601894e06989243e8896f43940c77cd8f4c904fe43c120b1595,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5d10c016b13499110b5f9ca2bccfaf6d2fd4298c9f02580d7208fe91850da0a6,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils@sha256:43f2c4ec2e38934288015cb5d5ae92941e8b3fa9a613539175641e2c16cfc0cc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api@sha256:c506a314e354f1ab274c46f9969b254f820e7515bbd9a24c9877dfbb10ece37e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute@sha256:96d4b699758dd3d408b4c672dbe4392fd09783b4dc60783389905d7220b6524c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor@sha256:7a0f3de7dda85fba7ad2929c7b01a2d42c11df9fe83f47a8e499a9da51e7f48c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy@sha256:49b5ae7f895266b90cf3c02503fb7146726e59ad782fdf88112ad6954112d7e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler@sha256:19b3d48b3c29eaa3a6d76fc145e212389f245c077bbf24eb5c1de0c96f3f7190,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api@sha256:227891a9f4821a92c49ddc27301303287d5632b6ac199e9fe402581f1831ec01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-
health-manager@sha256:26e3ada4b9fee357ef8bbb1c342b38c49c096ede8a498116e3753ad45354fb47,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping@sha256:2789b45ae2a5a9a80e4864e691f9e32fb9c9e1938cf92bda7c07defdbc78cdc2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog@sha256:8df8259e737625667b13897dc0094bf3d7ced54f414dda93293ad4cb68af1d43,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker@sha256:2fe4f8c71e11a926450d6553e5cb5c7b2db5d0de8426aa969f30d3d566114ff8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:ab5265aef98352336f23b18080f3ba110250859dc0edc20819348311a4a53044,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather@sha256:943eee724277e252795909137538a553ef5284c8103ad01b9be7b0138c66d14d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter@sha256:ecd56e6733c475f2d441344fd98f288c3eac0261ba113695fec7520a954ccbc7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi@sha256:a3c1b94a285064d150145340c06ad5b0afc4aa20caa74523f3972c19b1d1ea61,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:129e24971fee94cc60b5f440605f1512fb932a884e38e64122f38f11f942e3b9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:b96baffbb926f93936bd52f2a1ef4fe1d31bb469d6489e9fb67bf00b99156551,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:d659d1ffbbaff7c76fc96e6600dc9b03c53af2c9d63cfb4626dfb5831b7b1ad7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:e3fcd72e1a2790ca7db5d5c40c1ae597de4b020dd51debcab063352e6e5f7d79,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server@sha256:2504c0db038b850cdd6057fc50e109715a4453c386e4f4d4f901a20dc7b2036a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:35c124624fd84930496975032e22d57e517c5958e71ba63124a306a5949c71d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e3accbf4293c544194bd2151d4d0bd8b26828ddacda968bad5d5a6f05c2406db,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account@sha256:75ce8c4f9c68aaba6cab59749e726b2f94d29ba7b7897b18112fe1bd350efd8f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container@sha256:6390af78808d7cd69a4f5c7cb88f47690e54c9b8838b9461f4b21c4127ce770c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/
podified-antelope-centos9/openstack-swift-object@sha256:14489a8a681c482a643cb47fa90d0a3596b4570e13cfc760541ac80d37cd31b3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server@sha256:87367a67c7cb73476fb8d08ba108da843ac61170381458608e778a33c024c0c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all@sha256:d6123a9349d422888df97ee72d32643dd534f81c521f6f313c5d5e64e2db60c1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api@sha256:b273fd1e1da4190dc4cc67469d180b66b5a22eb6ec9afc76ef36dd6ea2beaea5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier@sha256:9561306ec9455914cd05a0a0b3e56d72c7164aa41d0f0ef9b03ac7d7343538b8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine@sha256:1115e5a2dce397b4a34a082cba1937903818ab5928048fcf775c4a4e6dda2d07,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6vzp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw_openstack-operators(73d438ca-470e-400d-9314-6567907fa58e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 15:46:02 crc kubenswrapper[5081]: E1003 15:46:02.824760 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podUID="73d438ca-470e-400d-9314-6567907fa58e" Oct 03 15:46:05 crc kubenswrapper[5081]: E1003 15:46:05.009491 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" podUID="09b461bb-473a-4d23-b18c-00d456eb8810" Oct 03 15:46:05 crc kubenswrapper[5081]: I1003 15:46:05.076915 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" event={"ID":"09b461bb-473a-4d23-b18c-00d456eb8810","Type":"ContainerStarted","Data":"f5feccb131cdb12ffee47ec666f673fe410d7c81629049b2e3f6587c6118dda0"} Oct 03 15:46:05 crc kubenswrapper[5081]: I1003 15:46:05.087879 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" event={"ID":"4bea9891-fd7e-44ed-9af7-868cb55a9a59","Type":"ContainerStarted","Data":"c3121f40edd717d118af9bc8f4d19b989e434cf98441b035a542ef2cae94dad2"} Oct 03 15:46:05 crc kubenswrapper[5081]: I1003 15:46:05.088165 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:46:05 crc kubenswrapper[5081]: I1003 15:46:05.093214 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" Oct 03 15:46:05 crc kubenswrapper[5081]: I1003 15:46:05.132042 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-d785ddfd5-mqj79" podStartSLOduration=35.14288752 podStartE2EDuration="49.132020243s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:17.885783883 +0000 UTC m=+1036.851340496" lastFinishedPulling="2025-10-03 15:45:31.874916606 +0000 UTC m=+1050.840473219" observedRunningTime="2025-10-03 15:46:05.119384398 +0000 UTC m=+1084.084941031" watchObservedRunningTime="2025-10-03 15:46:05.132020243 +0000 UTC m=+1084.097576856" Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.134271 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" event={"ID":"1f197349-94d5-4ef3-962b-89045495d0c9","Type":"ContainerStarted","Data":"512c6025878a238f7a0cdc26debf627c7f6ad6ad1e53a2088277f954036a2114"} Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.162844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" event={"ID":"61233e66-00aa-4863-be3d-56231db9d643","Type":"ContainerStarted","Data":"866fa4d834fe76e826edb194a80af57a7f8226ed62b4f0d53d0397c947e673a1"} Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.190936 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" event={"ID":"5d0a686a-cc92-40dc-a408-9b02863a2337","Type":"ContainerStarted","Data":"0f4e98800294395ff1d6ac0100774f52fba677167cfb7174ceef9be45db3887b"} Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.349912 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" 
event={"ID":"85db4351-f8f6-436b-9cf1-eb28aa937b21","Type":"ContainerStarted","Data":"a798b0b76a0b9679bdde61c758282f08a8899168dc8909ff82c736ae17280f27"} Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.386976 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" event={"ID":"fa557e19-6921-4cb5-88b0-10ee3093201c","Type":"ContainerStarted","Data":"6ef37e189257c264a3106677fae288f2830ed2e6623508758b096c08f06e4e41"} Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.388329 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:46:06 crc kubenswrapper[5081]: I1003 15:46:06.492120 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" podStartSLOduration=3.641303837 podStartE2EDuration="50.492101774s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:18.123240802 +0000 UTC m=+1037.088797415" lastFinishedPulling="2025-10-03 15:46:04.974038739 +0000 UTC m=+1083.939595352" observedRunningTime="2025-10-03 15:46:06.489110887 +0000 UTC m=+1085.454667510" watchObservedRunningTime="2025-10-03 15:46:06.492101774 +0000 UTC m=+1085.457658387" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.444743 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" event={"ID":"a4711b00-4b88-47ef-9d5b-c01b57ac9b18","Type":"ContainerStarted","Data":"f8a969193b52ebc095725ca99d6ec068faee381d88dff37f86caff702a8f62c1"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.444812 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" event={"ID":"a4711b00-4b88-47ef-9d5b-c01b57ac9b18","Type":"ContainerStarted","Data":"4ec12e12bd12a7d4301461ba6dbd93cb43a3ee4a6f168f5bb72830775e034dd6"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.445900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" event={"ID":"e762bb01-e884-43df-afe3-2c4bc45136a8","Type":"ContainerStarted","Data":"c1542a1f39a51d05545adebc02dd3ddd215224005c77ce1d4869b4698084fe62"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.447171 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" event={"ID":"a5c423f5-481d-4557-ac32-30285a8d7ed9","Type":"ContainerStarted","Data":"5a739201f3a095f36db537c594857fe142cde8e71d4a65386f4baf92e892f2d0"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.448357 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" event={"ID":"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0","Type":"ContainerStarted","Data":"5aa6e7b07e31c48628822c567db78233f95003033d348acfda785f5a48886014"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.449446 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" event={"ID":"5140942d-8224-4889-b650-7ebcd0ce93a1","Type":"ContainerStarted","Data":"c58e0fae7edc7e5464bbb415b85599e1529c48306f01a02f734e3d936b0a8309"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.450736 5081 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" event={"ID":"7d518e4e-beff-4962-83a3-e4147b2cefed","Type":"ContainerStarted","Data":"37aed3ba9f9935cf80f0bc384f6bb54dbd0807f2a9acd86ee0f4b3eccd2196b4"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.451141 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.452981 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.454382 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" event={"ID":"423af876-fc14-4fba-8835-4127010e0888","Type":"ContainerStarted","Data":"568774cf86fd4937d83d616dfa6f1ade5c215825bf9ae7f2d0e25cacbaeeaaca"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.455941 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" event={"ID":"cc394603-b291-47e6-b048-1668f1857a84","Type":"ContainerStarted","Data":"78f8cf96b7da42c9e0bf8d0db2451eb5bcb4d702c872660c7c502c95ea596f66"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.456101 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.457324 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" event={"ID":"770e893d-89aa-417c-9455-599c14023853","Type":"ContainerStarted","Data":"5e311c854da431083e575e4f7b62fd1a06870ca51ed98daa814d40f6e5d9a456"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.457515 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.458662 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" event={"ID":"a810b266-4fbd-4034-add2-362aa5496443","Type":"ContainerStarted","Data":"207dcf0c17151cfc3bfe94d37e1d27f56b2915d2df6b5a5d6eb401e6f9ef9a2f"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.459916 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" event={"ID":"109fc9da-53eb-440c-9e33-60388a4ec529","Type":"ContainerStarted","Data":"1b401aa8e649a164b489381b68c42bcdefe218a5903ffa20019ff26a461258bd"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.461219 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" event={"ID":"e305740c-d1a1-4150-ab8f-0742d6a50db3","Type":"ContainerStarted","Data":"23642708bf61b433088dbfce5c12294da26cabd4c383512921b91aed58b021a0"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.461253 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" event={"ID":"e305740c-d1a1-4150-ab8f-0742d6a50db3","Type":"ContainerStarted","Data":"8cc37cea200c161fc4a66402b8ea5b6b49779593637eee77147335c2c9b7e24a"} Oct 03 15:46:07 
crc kubenswrapper[5081]: I1003 15:46:07.462158 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" event={"ID":"3a7aaeef-f949-461a-be01-805e945451d3","Type":"ContainerStarted","Data":"5185f9f64c0b57acf70036b80647ac382d80690d6347dce8bee40d9e6372d0cf"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.463917 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" event={"ID":"1f197349-94d5-4ef3-962b-89045495d0c9","Type":"ContainerStarted","Data":"5808b217cf969c523a124949d69832e8c245fe94b8800c963f1e2cc2b9d2d16c"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.464461 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.466116 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" event={"ID":"85db4351-f8f6-436b-9cf1-eb28aa937b21","Type":"ContainerStarted","Data":"ebfcecd0dc9edc67fdb925fcfd749423aa54355de3717408c33e3e004fed05bb"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.466519 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.468881 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" event={"ID":"5d0a686a-cc92-40dc-a408-9b02863a2337","Type":"ContainerStarted","Data":"9b5afae544470a4f8aa3c8569dc6675adb29b7473bac8816f070dc75fefa4e26"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.472741 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" event={"ID":"cef2a91a-fb0a-418a-bc2a-83e535750cbd","Type":"ContainerStarted","Data":"a853c004fa1ace429e9e19093c6ed5d83ffd13ec48548e4eb38539603bbd2f8c"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.472967 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.473466 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8686fd99f7-cpmwx" podStartSLOduration=33.745501004 podStartE2EDuration="51.473453834s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:18.171689432 +0000 UTC m=+1037.137246045" lastFinishedPulling="2025-10-03 15:45:35.899642262 +0000 UTC m=+1054.865198875" observedRunningTime="2025-10-03 15:46:07.4712591 +0000 UTC m=+1086.436815723" watchObservedRunningTime="2025-10-03 15:46:07.473453834 +0000 UTC m=+1086.439010447" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.474859 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" event={"ID":"61233e66-00aa-4863-be3d-56231db9d643","Type":"ContainerStarted","Data":"b10f28830c64e50116c9dd2bde78624c835c3165947170a67a1431184f0167df"} Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.490593 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" podStartSLOduration=31.385807614 podStartE2EDuration="51.490539457s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:18.542666229 +0000 UTC m=+1037.508222842" lastFinishedPulling="2025-10-03 15:45:38.647398072 +0000 UTC m=+1057.612954685" observedRunningTime="2025-10-03 15:46:07.490008112 +0000 UTC m=+1086.455564725" watchObservedRunningTime="2025-10-03 15:46:07.490539457 +0000 UTC m=+1086.456096090" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.507812 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" podStartSLOduration=5.765964572 podStartE2EDuration="50.507789565s" podCreationTimestamp="2025-10-03 15:45:17 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.203043756 +0000 UTC m=+1038.168600369" lastFinishedPulling="2025-10-03 15:46:03.944868749 +0000 UTC m=+1082.910425362" observedRunningTime="2025-10-03 15:46:07.506410336 +0000 UTC m=+1086.471966959" watchObservedRunningTime="2025-10-03 15:46:07.507789565 +0000 UTC m=+1086.473346178" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.528948 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" podStartSLOduration=38.748294644 podStartE2EDuration="51.528928796s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.094419268 +0000 UTC m=+1038.059975871" lastFinishedPulling="2025-10-03 15:45:31.87505341 +0000 UTC m=+1050.840610023" observedRunningTime="2025-10-03 15:46:07.525716923 +0000 UTC m=+1086.491273546" watchObservedRunningTime="2025-10-03 15:46:07.528928796 +0000 UTC m=+1086.494485409" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.548572 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tptrj" podStartSLOduration=4.921426596 podStartE2EDuration="50.548530012s" podCreationTimestamp="2025-10-03 15:45:17 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.218953876 +0000 UTC m=+1038.184510489" lastFinishedPulling="2025-10-03 15:46:04.846057292 +0000 UTC m=+1083.811613905" observedRunningTime="2025-10-03 15:46:07.54361603 +0000 UTC m=+1086.509172653" watchObservedRunningTime="2025-10-03 15:46:07.548530012 +0000 UTC m=+1086.514086625" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.565275 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" podStartSLOduration=5.787699542 podStartE2EDuration="51.565246845s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.195820477 +0000 UTC m=+1038.161377090" lastFinishedPulling="2025-10-03 15:46:04.97336778 +0000 UTC m=+1083.938924393" observedRunningTime="2025-10-03 15:46:07.561001633 +0000 UTC m=+1086.526558256" watchObservedRunningTime="2025-10-03 15:46:07.565246845 +0000 UTC m=+1086.530803458" Oct 03 15:46:07 crc kubenswrapper[5081]: I1003 15:46:07.605444 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" podStartSLOduration=5.87241636 podStartE2EDuration="51.605422326s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.221753966 +0000 UTC 
m=+1038.187310579" lastFinishedPulling="2025-10-03 15:46:04.954759932 +0000 UTC m=+1083.920316545" observedRunningTime="2025-10-03 15:46:07.600825833 +0000 UTC m=+1086.566382466" watchObservedRunningTime="2025-10-03 15:46:07.605422326 +0000 UTC m=+1086.570978929" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.497666 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" event={"ID":"e762bb01-e884-43df-afe3-2c4bc45136a8","Type":"ContainerStarted","Data":"bdf634134e08bc8442be3b15ba4ed736ba03f6f471ecee248b3706611dd4eae8"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.498078 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.510085 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" event={"ID":"a5c423f5-481d-4557-ac32-30285a8d7ed9","Type":"ContainerStarted","Data":"48101c5fb2c738ea0575bf692513c418b3cffd9faf8017227954e5030686760e"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.510217 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.513254 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" event={"ID":"a810b266-4fbd-4034-add2-362aa5496443","Type":"ContainerStarted","Data":"cd43f28aab851d8119ccc929e122cfd9f5fa490c170fadcaf8fe338bae20870a"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.513378 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.520257 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" event={"ID":"717d0fb4-cd0f-42b2-aca0-47e6166fe5d0","Type":"ContainerStarted","Data":"389482413c0f243f2dc34bfcf0cdf2e6a47bf135573140e200273eacc1504047"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.520422 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.521314 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" podStartSLOduration=32.008508631 podStartE2EDuration="52.521289443s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:18.166721468 +0000 UTC m=+1037.132278081" lastFinishedPulling="2025-10-03 15:45:38.67950228 +0000 UTC m=+1057.645058893" observedRunningTime="2025-10-03 15:46:08.515548697 +0000 UTC m=+1087.481105300" watchObservedRunningTime="2025-10-03 15:46:08.521289443 +0000 UTC m=+1087.486846056" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.526826 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" event={"ID":"5140942d-8224-4889-b650-7ebcd0ce93a1","Type":"ContainerStarted","Data":"f6401642d26ca5b256574b1d937c995da9c0e4600ee9e6e06ff6aade60557078"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 
15:46:08.526979 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.531943 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" podStartSLOduration=32.0500312 podStartE2EDuration="51.53191561s" podCreationTimestamp="2025-10-03 15:45:17 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.165549253 +0000 UTC m=+1038.131105866" lastFinishedPulling="2025-10-03 15:45:38.647433663 +0000 UTC m=+1057.612990276" observedRunningTime="2025-10-03 15:46:08.530695045 +0000 UTC m=+1087.496251688" watchObservedRunningTime="2025-10-03 15:46:08.53191561 +0000 UTC m=+1087.497472233" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.536421 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" event={"ID":"109fc9da-53eb-440c-9e33-60388a4ec529","Type":"ContainerStarted","Data":"b199da76354c643a551bcc1025cd6b39cc3325701ddcd304be38c7cb2249646b"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.536526 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.540871 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" event={"ID":"423af876-fc14-4fba-8835-4127010e0888","Type":"ContainerStarted","Data":"36c18ece6cb94db94c5df49ef6ac35ac1714252b4c14ae674e87cc45a6e33749"} Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.566095 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" podStartSLOduration=33.063811378 podStartE2EDuration="52.566067387s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.159434606 +0000 UTC m=+1038.124991219" lastFinishedPulling="2025-10-03 15:45:38.661690625 +0000 UTC m=+1057.627247228" observedRunningTime="2025-10-03 15:46:08.558422446 +0000 UTC m=+1087.523979059" watchObservedRunningTime="2025-10-03 15:46:08.566067387 +0000 UTC m=+1087.531624000" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.582208 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" podStartSLOduration=32.965675742 podStartE2EDuration="52.582171272s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.044860096 +0000 UTC m=+1038.010416709" lastFinishedPulling="2025-10-03 15:45:38.661355626 +0000 UTC m=+1057.626912239" observedRunningTime="2025-10-03 15:46:08.577398814 +0000 UTC m=+1087.542955427" watchObservedRunningTime="2025-10-03 15:46:08.582171272 +0000 UTC m=+1087.547727885" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.617239 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" podStartSLOduration=32.0351096 podStartE2EDuration="52.617215814s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.353963976 +0000 UTC m=+1038.319520579" lastFinishedPulling="2025-10-03 15:45:39.93607016 +0000 UTC m=+1058.901626793" 
observedRunningTime="2025-10-03 15:46:08.610406688 +0000 UTC m=+1087.575963321" watchObservedRunningTime="2025-10-03 15:46:08.617215814 +0000 UTC m=+1087.582772427" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.635498 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" podStartSLOduration=33.075945398 podStartE2EDuration="52.635452691s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.119021229 +0000 UTC m=+1038.084577842" lastFinishedPulling="2025-10-03 15:45:38.678528502 +0000 UTC m=+1057.644085135" observedRunningTime="2025-10-03 15:46:08.628011636 +0000 UTC m=+1087.593568269" watchObservedRunningTime="2025-10-03 15:46:08.635452691 +0000 UTC m=+1087.601009304" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.652264 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" podStartSLOduration=34.355100001 podStartE2EDuration="52.652242976s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.16303055 +0000 UTC m=+1038.128587163" lastFinishedPulling="2025-10-03 15:45:37.460173485 +0000 UTC m=+1056.425730138" observedRunningTime="2025-10-03 15:46:08.651603138 +0000 UTC m=+1087.617159761" watchObservedRunningTime="2025-10-03 15:46:08.652242976 +0000 UTC m=+1087.617799589" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.670661 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" podStartSLOduration=33.12274773 podStartE2EDuration="52.670642908s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.118546825 +0000 UTC m=+1038.084103438" lastFinishedPulling="2025-10-03 15:45:38.666442013 +0000 UTC m=+1057.631998616" observedRunningTime="2025-10-03 15:46:08.669163375 +0000 UTC m=+1087.634719998" watchObservedRunningTime="2025-10-03 15:46:08.670642908 +0000 UTC m=+1087.636199521" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.707412 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" podStartSLOduration=33.205163231 podStartE2EDuration="52.707382749s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.159449467 +0000 UTC m=+1038.125006080" lastFinishedPulling="2025-10-03 15:45:38.661668985 +0000 UTC m=+1057.627225598" observedRunningTime="2025-10-03 15:46:08.701692785 +0000 UTC m=+1087.667249418" watchObservedRunningTime="2025-10-03 15:46:08.707382749 +0000 UTC m=+1087.672939362" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.727306 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" podStartSLOduration=32.427404003 podStartE2EDuration="52.727276744s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:18.361794994 +0000 UTC m=+1037.327351607" lastFinishedPulling="2025-10-03 15:45:38.661667735 +0000 UTC m=+1057.627224348" observedRunningTime="2025-10-03 15:46:08.722325251 +0000 UTC m=+1087.687881864" watchObservedRunningTime="2025-10-03 15:46:08.727276744 +0000 UTC m=+1087.692833367" Oct 03 15:46:08 crc kubenswrapper[5081]: I1003 15:46:08.745754 5081 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" podStartSLOduration=35.920577988 podStartE2EDuration="52.745728357s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.139092539 +0000 UTC m=+1038.104649142" lastFinishedPulling="2025-10-03 15:45:35.964242888 +0000 UTC m=+1054.929799511" observedRunningTime="2025-10-03 15:46:08.740525796 +0000 UTC m=+1087.706082399" watchObservedRunningTime="2025-10-03 15:46:08.745728357 +0000 UTC m=+1087.711284970" Oct 03 15:46:09 crc kubenswrapper[5081]: I1003 15:46:09.565543 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" event={"ID":"09b461bb-473a-4d23-b18c-00d456eb8810","Type":"ContainerStarted","Data":"bb2f7a6cccdb250c4c907e7292c601d41ffe6559fca5d190c1b549b8b14ad4cc"} Oct 03 15:46:09 crc kubenswrapper[5081]: I1003 15:46:09.567029 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:46:09 crc kubenswrapper[5081]: I1003 15:46:09.567086 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:46:09 crc kubenswrapper[5081]: I1003 15:46:09.591372 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" podStartSLOduration=4.285553128 podStartE2EDuration="53.591341345s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.175973744 +0000 UTC m=+1038.141530347" lastFinishedPulling="2025-10-03 15:46:08.481761951 +0000 UTC m=+1087.447318564" observedRunningTime="2025-10-03 15:46:09.582551111 +0000 UTC m=+1088.548107724" watchObservedRunningTime="2025-10-03 15:46:09.591341345 +0000 UTC m=+1088.556897998" Oct 03 15:46:13 crc kubenswrapper[5081]: E1003 15:46:13.831648 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:f50229c8a33fd581bccbe5f34bbaf3936c1b454802e755c9b48b40b76a8239ee\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podUID="73d438ca-470e-400d-9314-6567907fa58e" Oct 03 15:46:16 crc kubenswrapper[5081]: I1003 15:46:16.826462 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8pt7x" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.085923 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6d6d64fdcf-xfg45" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.117933 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6c9969c6c6-qsnkt" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.161360 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.165137 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ironic-operator-controller-manager-59b5fc9845-fxw8p" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.224015 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.227667 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5ffbdb7ddf-s5dz2" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.240894 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-586b66cf4f-t8jvq" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.312065 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-549fb68678-xrlvp" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.350436 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-66fdd975d9-vjfhq" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.465008 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.467183 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-696ff4bcdd-92bxf" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.492052 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-b4444585c-qx7v7" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.613250 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7c9978f67-sz2f7" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.637093 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.641007 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5b45478b88-glxmc" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.733434 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-855d7949fc-2w6tr" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.801603 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-ccbfcb8c-hw6vw" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.878681 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-76d5577b-4ngsx" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.887921 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5ffb97cddf-wzqnn" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.939299 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-j47qs" Oct 03 15:46:17 crc kubenswrapper[5081]: I1003 15:46:17.976942 
5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5595cf6c95-2c4gm" Oct 03 15:46:30 crc kubenswrapper[5081]: I1003 15:46:30.647807 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:46:30 crc kubenswrapper[5081]: I1003 15:46:30.648613 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:46:32 crc kubenswrapper[5081]: I1003 15:46:32.772992 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" event={"ID":"73d438ca-470e-400d-9314-6567907fa58e","Type":"ContainerStarted","Data":"72a6f514e327d69916b353e108a395290ea38ec6c6beb8357623f2a89de1e10c"} Oct 03 15:46:32 crc kubenswrapper[5081]: I1003 15:46:32.775450 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:46:32 crc kubenswrapper[5081]: I1003 15:46:32.804487 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" podStartSLOduration=4.200872406 podStartE2EDuration="1m16.804461591s" podCreationTimestamp="2025-10-03 15:45:16 +0000 UTC" firstStartedPulling="2025-10-03 15:45:19.219201543 +0000 UTC m=+1038.184758156" lastFinishedPulling="2025-10-03 15:46:31.822790708 +0000 UTC m=+1110.788347341" observedRunningTime="2025-10-03 15:46:32.802522645 +0000 UTC m=+1111.768079288" watchObservedRunningTime="2025-10-03 15:46:32.804461591 +0000 UTC m=+1111.770018204" Oct 03 15:46:38 crc kubenswrapper[5081]: I1003 15:46:38.287658 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.734321 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.744088 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.747542 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.748317 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.748502 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-t7qfk" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.749837 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.756307 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.805715 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.807459 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.812038 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.815050 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.860388 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.860910 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x25wh\" (UniqueName: \"kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.962767 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.963327 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.963357 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:53 crc 
kubenswrapper[5081]: I1003 15:46:53.963424 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x25wh\" (UniqueName: \"kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.963490 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfmjs\" (UniqueName: \"kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.966148 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:53 crc kubenswrapper[5081]: I1003 15:46:53.989198 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x25wh\" (UniqueName: \"kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh\") pod \"dnsmasq-dns-6d84845cb9-l7k47\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.065075 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.065222 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfmjs\" (UniqueName: \"kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.065275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.066313 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.066481 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.068737 5081 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.088413 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfmjs\" (UniqueName: \"kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs\") pod \"dnsmasq-dns-8687b65d7f-vb479\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.138308 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.423174 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:46:54 crc kubenswrapper[5081]: W1003 15:46:54.428550 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a3c0074_b5f3_448e_bb08_6a64193b514c.slice/crio-eaacf268c8bb37312b78e2c5b1a03dbb4580894e234c2cf1cfdcdd3429d49bdb WatchSource:0}: Error finding container eaacf268c8bb37312b78e2c5b1a03dbb4580894e234c2cf1cfdcdd3429d49bdb: Status 404 returned error can't find the container with id eaacf268c8bb37312b78e2c5b1a03dbb4580894e234c2cf1cfdcdd3429d49bdb Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.536609 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:46:54 crc kubenswrapper[5081]: W1003 15:46:54.541309 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c97ce8_5085_4303_a0d3_bfa57be82ff1.slice/crio-d1abdf265b77de284e4ab24bfb48d0a6e97463434bfbc2dcc702a556397ee6cb WatchSource:0}: Error finding container d1abdf265b77de284e4ab24bfb48d0a6e97463434bfbc2dcc702a556397ee6cb: Status 404 returned error can't find the container with id d1abdf265b77de284e4ab24bfb48d0a6e97463434bfbc2dcc702a556397ee6cb Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.942266 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" event={"ID":"62c97ce8-5085-4303-a0d3-bfa57be82ff1","Type":"ContainerStarted","Data":"d1abdf265b77de284e4ab24bfb48d0a6e97463434bfbc2dcc702a556397ee6cb"} Oct 03 15:46:54 crc kubenswrapper[5081]: I1003 15:46:54.943369 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" event={"ID":"7a3c0074-b5f3-448e-bb08-6a64193b514c","Type":"ContainerStarted","Data":"eaacf268c8bb37312b78e2c5b1a03dbb4580894e234c2cf1cfdcdd3429d49bdb"} Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.682892 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.737155 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.738943 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.776009 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.917131 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.917195 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:56 crc kubenswrapper[5081]: I1003 15:46:56.917227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4gnt\" (UniqueName: \"kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.018583 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.018644 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.018686 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4gnt\" (UniqueName: \"kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.021014 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.021517 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.049432 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4gnt\" (UniqueName: 
\"kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt\") pod \"dnsmasq-dns-7b749bd587-mccxf\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.058594 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.114721 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.145072 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.146551 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.182436 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.332218 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.332478 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.333031 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlsvn\" (UniqueName: \"kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.434840 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlsvn\" (UniqueName: \"kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.434901 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.434985 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.435978 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.437232 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.464512 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlsvn\" (UniqueName: \"kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn\") pod \"dnsmasq-dns-5cb7995759-v2fd8\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.496361 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.859117 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:46:57 crc kubenswrapper[5081]: I1003 15:46:57.990935 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" event={"ID":"c12ae457-80da-4a13-97d6-7fdacc9c43c4","Type":"ContainerStarted","Data":"a9443b064176971e15468d4e07d8af926458f84f837a71ca5e355cc4d85017c0"} Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.012212 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.013790 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016190 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016202 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016485 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016680 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-q589g" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016810 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.016933 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.020286 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.024095 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.083083 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:46:58 crc kubenswrapper[5081]: W1003 15:46:58.101010 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cfbd5d7_e5c8_4d9e_9519_705da27c6e53.slice/crio-e9e7849fdb38afffa7fd9df66dad896eade2cb5fe082b761e1874948fa287a7f WatchSource:0}: Error finding container e9e7849fdb38afffa7fd9df66dad896eade2cb5fe082b761e1874948fa287a7f: Status 404 returned error can't find the container with id e9e7849fdb38afffa7fd9df66dad896eade2cb5fe082b761e1874948fa287a7f Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.146656 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.146719 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.146866 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.146979 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147016 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147155 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147253 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147280 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147317 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147340 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.147390 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbxsb\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248549 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248625 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 
15:46:58.248648 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248696 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248742 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248762 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248787 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248852 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248923 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbxsb\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248965 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.248983 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.250156 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.251284 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.251458 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.251510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.251804 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.251803 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.264710 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.264842 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.265741 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.267927 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.273021 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.273309 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-tn5jb" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.273621 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.274703 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.274941 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.275144 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.276587 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.277761 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.285681 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.287818 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbxsb\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.303797 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.307574 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.350858 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.350920 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.350952 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.350978 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351018 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351040 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351078 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351109 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351132 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351166 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6vsv\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.351776 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.399614 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453151 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453195 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453231 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453257 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453282 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6vsv\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453394 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453417 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453442 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.453462 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.454507 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.454698 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.454805 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.455292 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.455628 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.458215 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.459120 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.461334 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.467231 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.469633 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.475722 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6vsv\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.483318 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.659231 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:46:58 crc kubenswrapper[5081]: I1003 15:46:58.678652 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 15:46:58 crc kubenswrapper[5081]: W1003 15:46:58.700114 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7060c6c8_bbe8_47ae_8ef2_4358291dbb61.slice/crio-69af4ca883d265ec585015516f9d0443b34a57fe43f25c6b46a3ec2ad9d36eea WatchSource:0}: Error finding container 69af4ca883d265ec585015516f9d0443b34a57fe43f25c6b46a3ec2ad9d36eea: Status 404 returned error can't find the container with id 69af4ca883d265ec585015516f9d0443b34a57fe43f25c6b46a3ec2ad9d36eea Oct 03 15:46:59 crc kubenswrapper[5081]: I1003 15:46:59.006841 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerStarted","Data":"69af4ca883d265ec585015516f9d0443b34a57fe43f25c6b46a3ec2ad9d36eea"} Oct 03 15:46:59 crc kubenswrapper[5081]: I1003 15:46:59.010051 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" event={"ID":"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53","Type":"ContainerStarted","Data":"e9e7849fdb38afffa7fd9df66dad896eade2cb5fe082b761e1874948fa287a7f"} Oct 03 15:46:59 crc kubenswrapper[5081]: I1003 15:46:59.176301 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:46:59 crc kubenswrapper[5081]: W1003 15:46:59.200099 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5344c615_93ae_4a4a_95b1_3bbe3327f42e.slice/crio-e135e27dd60f8ae622221c072f1620cfa082534752056de1943e3a06181dc780 WatchSource:0}: Error finding container e135e27dd60f8ae622221c072f1620cfa082534752056de1943e3a06181dc780: Status 404 returned error can't find the container with id e135e27dd60f8ae622221c072f1620cfa082534752056de1943e3a06181dc780 Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.019278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerStarted","Data":"e135e27dd60f8ae622221c072f1620cfa082534752056de1943e3a06181dc780"} Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.543424 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.545354 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.548829 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.550036 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.550273 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.550872 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.551019 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-bg5gv" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.556891 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.568365 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.574891 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.576892 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.581473 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.581646 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-q95sk" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.581804 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.581898 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601185 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdzkk\" (UniqueName: \"kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601638 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601726 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601832 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.601926 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.602034 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.602120 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.602257 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.602355 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.647623 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.647695 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704013 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49mxf\" (UniqueName: \"kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc 
kubenswrapper[5081]: I1003 15:47:00.704100 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704272 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704375 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704403 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704425 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704519 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704551 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704596 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704622 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704643 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704698 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704725 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704753 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704805 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704832 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704856 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.704881 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdzkk\" (UniqueName: \"kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.705020 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.705973 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.706273 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.706510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.706761 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.712357 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.712706 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.722292 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.725289 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdzkk\" (UniqueName: \"kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.732346 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.806467 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807171 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807276 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49mxf\" (UniqueName: \"kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807395 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807475 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807596 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807787 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807873 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.807993 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.808116 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.808430 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.808508 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.808837 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.809683 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.812966 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.813299 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.814308 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.831423 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49mxf\" (UniqueName: \"kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.835061 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.873435 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 15:47:00 crc kubenswrapper[5081]: I1003 15:47:00.903878 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.352691 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.357406 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.362490 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.364473 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4n6dp" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.368400 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.379772 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.424315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.424742 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.424842 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msrt7\" (UniqueName: \"kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.425807 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.425923 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.521523 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.528236 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.528303 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-msrt7\" (UniqueName: \"kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.528374 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.528416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.528498 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.529699 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.533625 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.535955 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.541919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.551116 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msrt7\" (UniqueName: \"kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7\") pod \"memcached-0\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " pod="openstack/memcached-0" Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.580377 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:47:01 crc kubenswrapper[5081]: W1003 15:47:01.583935 5081 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14a4c256_cdd6_4275_a2e6_6f7dd0f2a4d1.slice/crio-21d95d68569656a54923a26532efc674166d6ab27e3d18d4627f4a5fcb1dcb60 WatchSource:0}: Error finding container 21d95d68569656a54923a26532efc674166d6ab27e3d18d4627f4a5fcb1dcb60: Status 404 returned error can't find the container with id 21d95d68569656a54923a26532efc674166d6ab27e3d18d4627f4a5fcb1dcb60 Oct 03 15:47:01 crc kubenswrapper[5081]: I1003 15:47:01.687394 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 15:47:02 crc kubenswrapper[5081]: I1003 15:47:02.035612 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerStarted","Data":"c2b2dbef3cf95a2c7bd9fa9ae0eba334e37bf7b5cf4fd95c2daad84683fcef05"} Oct 03 15:47:02 crc kubenswrapper[5081]: I1003 15:47:02.037267 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerStarted","Data":"21d95d68569656a54923a26532efc674166d6ab27e3d18d4627f4a5fcb1dcb60"} Oct 03 15:47:02 crc kubenswrapper[5081]: I1003 15:47:02.135092 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 15:47:02 crc kubenswrapper[5081]: W1003 15:47:02.145510 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca0c9b41_c081_4a81_90f2_730e16c7d347.slice/crio-c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b WatchSource:0}: Error finding container c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b: Status 404 returned error can't find the container with id c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.045974 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ca0c9b41-c081-4a81-90f2-730e16c7d347","Type":"ContainerStarted","Data":"c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b"} Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.237990 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.239377 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.243334 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-nlgst" Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.254468 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.365117 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb7t9\" (UniqueName: \"kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9\") pod \"kube-state-metrics-0\" (UID: \"5e54a24e-4043-40a3-8715-bb461d3f1bde\") " pod="openstack/kube-state-metrics-0" Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.467597 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb7t9\" (UniqueName: \"kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9\") pod \"kube-state-metrics-0\" (UID: \"5e54a24e-4043-40a3-8715-bb461d3f1bde\") " pod="openstack/kube-state-metrics-0" Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.498196 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb7t9\" (UniqueName: \"kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9\") pod \"kube-state-metrics-0\" (UID: \"5e54a24e-4043-40a3-8715-bb461d3f1bde\") " pod="openstack/kube-state-metrics-0" Oct 03 15:47:03 crc kubenswrapper[5081]: I1003 15:47:03.569296 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:47:04 crc kubenswrapper[5081]: I1003 15:47:04.059053 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:47:05 crc kubenswrapper[5081]: I1003 15:47:05.096937 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e54a24e-4043-40a3-8715-bb461d3f1bde","Type":"ContainerStarted","Data":"09637ff84fb5c81e7dafeae53f186da31375de17584dca598589319d3f76d2be"} Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.883275 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kngsq"] Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.885298 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq" Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.890972 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.891138 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.893808 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-sq5fs" Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.899734 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-52hh5"] Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.905131 5081 util.go:30] "No sandbox for pod can be found. 
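The entries above trace the kubelet's per-volume flow for a new pod: reconciler_common.go logs "VerifyControllerAttachedVolume started", then "MountVolume started", and operation_generator.go logs "MountVolume.SetUp succeeded" once the volume is ready; only after every volume completes does the sandbox start. A minimal Go sketch (not part of this log; the regexes are assumptions based on the klog quoting seen here, where UniqueName appears as \"...\") that pairs started/succeeded messages from a journal on stdin and reports volumes that never finished mounting:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    func main() {
    	started := map[string]string{} // UniqueName -> "ns/pod"
    	begin := regexp.MustCompile(`operationExecutor\.MountVolume started for volume .*\(UniqueName: \\"([^"\\]+)\\"\).* pod="([^"]+)"`)
    	done := regexp.MustCompile(`MountVolume\.SetUp succeeded for volume .*\(UniqueName: \\"([^"\\]+)\\"\)`)

    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // kubelet lines can be very long
    	for sc.Scan() {
    		if m := begin.FindStringSubmatch(sc.Text()); m != nil {
    			started[m[1]] = m[2]
    		} else if m := done.FindStringSubmatch(sc.Text()); m != nil {
    			delete(started, m[1])
    		}
    	}
    	for vol, pod := range started {
    		fmt.Printf("pod %s: volume %s never reported SetUp success\n", pod, vol)
    	}
    }

Run on the node, something like journalctl -u kubelet piped into this sketch would flag a pod such as memcached-0 above if any of its five volumes had stalled between "started" and "SetUp succeeded".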
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.908841 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kngsq"]
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.918196 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-52hh5"]
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938112 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938214 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938277 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938298 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938317 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938338 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938361 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bbvl\" (UniqueName: \"kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938393 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938532 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938778 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96lpm\" (UniqueName: \"kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938828 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:06 crc kubenswrapper[5081]: I1003 15:47:06.938866 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041011 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96lpm\" (UniqueName: \"kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041071 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041105 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041127 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041160 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041184 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041204 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041242 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041265 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041287 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bbvl\" (UniqueName: \"kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041314 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.041338 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042057 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042158 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042221 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042304 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042397 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042521 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.042972 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.045446 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.045446 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.049434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.050300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.061367 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96lpm\" (UniqueName: \"kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm\") pod \"ovn-controller-kngsq\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") " pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.073167 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bbvl\" (UniqueName: \"kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl\") pod \"ovn-controller-ovs-52hh5\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.209893 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.232735 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-52hh5"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.420853 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.422864 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.424856 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.426154 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-cs9sv"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.426329 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.426504 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.426667 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.429481 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.450852 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.450996 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0"
Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.451026 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0"
\"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.451092 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.451147 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b96p4\" (UniqueName: \"kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.451201 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.452054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.452129 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553105 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553229 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553269 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553326 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " 
pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553366 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553391 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553421 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.553451 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b96p4\" (UniqueName: \"kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.554494 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.554905 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.554901 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.555787 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.567216 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.568371 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: 
\"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.568522 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.575520 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b96p4\" (UniqueName: \"kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.610479 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") " pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:07 crc kubenswrapper[5081]: I1003 15:47:07.746371 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.444451 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.446477 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.449478 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.450362 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.450544 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.450745 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-k7ltk" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.462749 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620224 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620336 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp7k4\" (UniqueName: \"kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620411 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts\") pod 
\"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620433 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620457 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620553 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.620594 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723103 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723171 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723228 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723283 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp7k4\" (UniqueName: \"kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723348 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723372 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723402 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723441 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.723715 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.724034 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.724348 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.724988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.730664 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.732351 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 
15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.744862 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.749541 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp7k4\" (UniqueName: \"kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.758084 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:10 crc kubenswrapper[5081]: I1003 15:47:10.781714 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:18 crc kubenswrapper[5081]: E1003 15:47:18.304042 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e3accbf4293c544194bd2151d4d0bd8b26828ddacda968bad5d5a6f05c2406db" Oct 03 15:47:18 crc kubenswrapper[5081]: E1003 15:47:18.305085 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e3accbf4293c544194bd2151d4d0bd8b26828ddacda968bad5d5a6f05c2406db,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
Oct 03 15:47:18 crc kubenswrapper[5081]: E1003 15:47:18.306343 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.037446 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.037777 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zfmjs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8687b65d7f-vb479_openstack(7a3c0074-b5f3-448e-bb08-6a64193b514c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.037927 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.038129 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x25wh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6d84845cb9-l7k47_openstack(62c97ce8-5085-4303-a0d3-bfa57be82ff1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.039040 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" podUID="7a3c0074-b5f3-448e-bb08-6a64193b514c"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.040155 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" podUID="62c97ce8-5085-4303-a0d3-bfa57be82ff1"
Oct 03 15:47:19 crc kubenswrapper[5081]: E1003 15:47:19.240771 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e3accbf4293c544194bd2151d4d0bd8b26828ddacda968bad5d5a6f05c2406db\\\"\"" pod="openstack/rabbitmq-server-0" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61"
Oct 03 15:47:21 crc kubenswrapper[5081]: E1003 15:47:21.987408 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5"
Oct 03 15:47:21 crc kubenswrapper[5081]: E1003 15:47:21.987921 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xlsvn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5cb7995759-v2fd8_openstack(1cfbd5d7-e5c8-4d9e-9519-705da27c6e53): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:47:21 crc kubenswrapper[5081]: E1003 15:47:21.989161 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" podUID="1cfbd5d7-e5c8-4d9e-9519-705da27c6e53"
Oct 03 15:47:22 crc kubenswrapper[5081]: E1003 15:47:22.276584 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5\\\"\"" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" podUID="1cfbd5d7-e5c8-4d9e-9519-705da27c6e53"
Oct 03 15:47:22 crc kubenswrapper[5081]: E1003 15:47:22.786968 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5"
Oct 03 15:47:22 crc kubenswrapper[5081]: E1003 15:47:22.787476 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4gnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7b749bd587-mccxf_openstack(c12ae457-80da-4a13-97d6-7fdacc9c43c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:47:22 crc kubenswrapper[5081]: E1003 15:47:22.788691 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" podUID="c12ae457-80da-4a13-97d6-7fdacc9c43c4"
Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.829914 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8687b65d7f-vb479"
Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.837460 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47"
Need to start a new one" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.924379 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfmjs\" (UniqueName: \"kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs\") pod \"7a3c0074-b5f3-448e-bb08-6a64193b514c\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.924486 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config\") pod \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.924625 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config\") pod \"7a3c0074-b5f3-448e-bb08-6a64193b514c\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.924728 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc\") pod \"7a3c0074-b5f3-448e-bb08-6a64193b514c\" (UID: \"7a3c0074-b5f3-448e-bb08-6a64193b514c\") " Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.924806 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x25wh\" (UniqueName: \"kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh\") pod \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\" (UID: \"62c97ce8-5085-4303-a0d3-bfa57be82ff1\") " Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.925101 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config" (OuterVolumeSpecName: "config") pod "62c97ce8-5085-4303-a0d3-bfa57be82ff1" (UID: "62c97ce8-5085-4303-a0d3-bfa57be82ff1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.925782 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62c97ce8-5085-4303-a0d3-bfa57be82ff1-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.926573 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config" (OuterVolumeSpecName: "config") pod "7a3c0074-b5f3-448e-bb08-6a64193b514c" (UID: "7a3c0074-b5f3-448e-bb08-6a64193b514c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.927029 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a3c0074-b5f3-448e-bb08-6a64193b514c" (UID: "7a3c0074-b5f3-448e-bb08-6a64193b514c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.928871 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs" (OuterVolumeSpecName: "kube-api-access-zfmjs") pod "7a3c0074-b5f3-448e-bb08-6a64193b514c" (UID: "7a3c0074-b5f3-448e-bb08-6a64193b514c"). InnerVolumeSpecName "kube-api-access-zfmjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:22 crc kubenswrapper[5081]: I1003 15:47:22.931509 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh" (OuterVolumeSpecName: "kube-api-access-x25wh") pod "62c97ce8-5085-4303-a0d3-bfa57be82ff1" (UID: "62c97ce8-5085-4303-a0d3-bfa57be82ff1"). InnerVolumeSpecName "kube-api-access-x25wh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.028755 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.031146 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a3c0074-b5f3-448e-bb08-6a64193b514c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.031168 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x25wh\" (UniqueName: \"kubernetes.io/projected/62c97ce8-5085-4303-a0d3-bfa57be82ff1-kube-api-access-x25wh\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.031179 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfmjs\" (UniqueName: \"kubernetes.io/projected/7a3c0074-b5f3-448e-bb08-6a64193b514c-kube-api-access-zfmjs\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.288473 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" event={"ID":"62c97ce8-5085-4303-a0d3-bfa57be82ff1","Type":"ContainerDied","Data":"d1abdf265b77de284e4ab24bfb48d0a6e97463434bfbc2dcc702a556397ee6cb"} Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.288522 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d84845cb9-l7k47" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.289741 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.290684 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8687b65d7f-vb479" event={"ID":"7a3c0074-b5f3-448e-bb08-6a64193b514c","Type":"ContainerDied","Data":"eaacf268c8bb37312b78e2c5b1a03dbb4580894e234c2cf1cfdcdd3429d49bdb"} Oct 03 15:47:23 crc kubenswrapper[5081]: E1003 15:47:23.291342 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5\\\"\"" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" podUID="c12ae457-80da-4a13-97d6-7fdacc9c43c4" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.369476 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.376538 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8687b65d7f-vb479"] Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.463654 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.480398 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d84845cb9-l7k47"] Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.513226 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kngsq"] Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.739204 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 15:47:23 crc kubenswrapper[5081]: W1003 15:47:23.748229 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod686d3fe6_8cc9_4013_a5f3_55fe41ac840e.slice/crio-3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964 WatchSource:0}: Error finding container 3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964: Status 404 returned error can't find the container with id 3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964 Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.841120 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c97ce8-5085-4303-a0d3-bfa57be82ff1" path="/var/lib/kubelet/pods/62c97ce8-5085-4303-a0d3-bfa57be82ff1/volumes" Oct 03 15:47:23 crc kubenswrapper[5081]: I1003 15:47:23.842049 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a3c0074-b5f3-448e-bb08-6a64193b514c" path="/var/lib/kubelet/pods/7a3c0074-b5f3-448e-bb08-6a64193b514c/volumes" Oct 03 15:47:23 crc kubenswrapper[5081]: W1003 15:47:23.866941 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod262a88da_d2e8_428f_b38a_1b59714ddfe7.slice/crio-fa3d3d2bcd76a6f259158815318000e06e1f52e78c04db4591f1e1a3e78f6c69 WatchSource:0}: Error finding container fa3d3d2bcd76a6f259158815318000e06e1f52e78c04db4591f1e1a3e78f6c69: Status 404 returned error can't find the container with id fa3d3d2bcd76a6f259158815318000e06e1f52e78c04db4591f1e1a3e78f6c69 Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.299613 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"ca0c9b41-c081-4a81-90f2-730e16c7d347","Type":"ContainerStarted","Data":"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727"} Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.300016 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.303438 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq" event={"ID":"686d3fe6-8cc9-4013-a5f3-55fe41ac840e","Type":"ContainerStarted","Data":"3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964"} Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.305044 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerStarted","Data":"fa3d3d2bcd76a6f259158815318000e06e1f52e78c04db4591f1e1a3e78f6c69"} Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.339237 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.581764453 podStartE2EDuration="23.339213085s" podCreationTimestamp="2025-10-03 15:47:01 +0000 UTC" firstStartedPulling="2025-10-03 15:47:02.148098996 +0000 UTC m=+1141.113655609" lastFinishedPulling="2025-10-03 15:47:22.905547628 +0000 UTC m=+1161.871104241" observedRunningTime="2025-10-03 15:47:24.328891646 +0000 UTC m=+1163.294448259" watchObservedRunningTime="2025-10-03 15:47:24.339213085 +0000 UTC m=+1163.304769698" Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.563793 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-52hh5"] Oct 03 15:47:24 crc kubenswrapper[5081]: W1003 15:47:24.690282 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf41a1c07_9bcc_4237_869e_dff5d9c480f8.slice/crio-614d239c57dcf51221b6fce0fa442cf71d5f3dc97dea8711c6acbf0f7c511e2d WatchSource:0}: Error finding container 614d239c57dcf51221b6fce0fa442cf71d5f3dc97dea8711c6acbf0f7c511e2d: Status 404 returned error can't find the container with id 614d239c57dcf51221b6fce0fa442cf71d5f3dc97dea8711c6acbf0f7c511e2d Oct 03 15:47:24 crc kubenswrapper[5081]: I1003 15:47:24.765480 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 15:47:24 crc kubenswrapper[5081]: W1003 15:47:24.777846 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3630909_8ada_4296_af7c_8135f2221e39.slice/crio-d58f96855ad64e1201fbbfdc8801cbf93f8573345bac3fac1a0c5b7b45437708 WatchSource:0}: Error finding container d58f96855ad64e1201fbbfdc8801cbf93f8573345bac3fac1a0c5b7b45437708: Status 404 returned error can't find the container with id d58f96855ad64e1201fbbfdc8801cbf93f8573345bac3fac1a0c5b7b45437708 Oct 03 15:47:25 crc kubenswrapper[5081]: I1003 15:47:25.313886 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerStarted","Data":"5334f37d8987ae4171ae3899b8da5f80778f1221e5d6c5d21da758b59468cc2f"} Oct 03 15:47:25 crc kubenswrapper[5081]: I1003 15:47:25.314894 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerStarted","Data":"d58f96855ad64e1201fbbfdc8801cbf93f8573345bac3fac1a0c5b7b45437708"} Oct 03 15:47:25 crc 
kubenswrapper[5081]: I1003 15:47:25.318791 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerStarted","Data":"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320"} Oct 03 15:47:25 crc kubenswrapper[5081]: I1003 15:47:25.321692 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerStarted","Data":"614d239c57dcf51221b6fce0fa442cf71d5f3dc97dea8711c6acbf0f7c511e2d"} Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.133373 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-4j44h"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.134767 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.137268 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.156222 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-4j44h"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.267702 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.267752 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.267776 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.268218 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.268540 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpfgt\" (UniqueName: \"kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.268603 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: 
\"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.285856 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.309444 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.311413 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.315252 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.340634 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpfgt\" (UniqueName: \"kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370497 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370592 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370632 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.370654 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.374724 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.374835 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.375820 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.380670 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.380813 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.440309 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpfgt\" (UniqueName: \"kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt\") pod \"ovn-controller-metrics-4j44h\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.469997 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.472952 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nks5\" (UniqueName: \"kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.473054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.473098 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.473134 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.579814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nks5\" (UniqueName: \"kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.579927 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.579977 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.580018 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.581200 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " 
pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.582536 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.583972 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.584659 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.605022 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nks5\" (UniqueName: \"kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5\") pod \"dnsmasq-dns-6d8b9bcdd7-lnql9\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.617613 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.625318 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.628236 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.639536 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.644941 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.648093 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.648135 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.648179 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.648788 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.648841 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979" gracePeriod=600 Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.784178 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.784222 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.784266 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw5ng\" (UniqueName: \"kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.784333 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: 
\"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.784363 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.885809 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.885862 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.885920 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.885938 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.885969 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw5ng\" (UniqueName: \"kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.886790 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.887022 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.887427 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" 
Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.887788 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.903985 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw5ng\" (UniqueName: \"kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng\") pod \"dnsmasq-dns-84d58dc6cf-5wgf8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:30 crc kubenswrapper[5081]: I1003 15:47:30.968408 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:31 crc kubenswrapper[5081]: I1003 15:47:31.372808 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979" exitCode=0 Oct 03 15:47:31 crc kubenswrapper[5081]: I1003 15:47:31.372862 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979"} Oct 03 15:47:31 crc kubenswrapper[5081]: I1003 15:47:31.373220 5081 scope.go:117] "RemoveContainer" containerID="c5285c2706f0562112a34a4f95372325994d5f5da0f641a9bc545482a3b072b5" Oct 03 15:47:31 crc kubenswrapper[5081]: I1003 15:47:31.688756 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 03 15:47:31 crc kubenswrapper[5081]: I1003 15:47:31.993311 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.003320 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106010 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc\") pod \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106129 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc\") pod \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106185 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4gnt\" (UniqueName: \"kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt\") pod \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106206 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlsvn\" (UniqueName: \"kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn\") pod \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106300 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config\") pod \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\" (UID: \"c12ae457-80da-4a13-97d6-7fdacc9c43c4\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.106321 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config\") pod \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\" (UID: \"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53\") " Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.107049 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config" (OuterVolumeSpecName: "config") pod "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53" (UID: "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.107274 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53" (UID: "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.108409 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c12ae457-80da-4a13-97d6-7fdacc9c43c4" (UID: "c12ae457-80da-4a13-97d6-7fdacc9c43c4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.108436 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config" (OuterVolumeSpecName: "config") pod "c12ae457-80da-4a13-97d6-7fdacc9c43c4" (UID: "c12ae457-80da-4a13-97d6-7fdacc9c43c4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.116842 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt" (OuterVolumeSpecName: "kube-api-access-h4gnt") pod "c12ae457-80da-4a13-97d6-7fdacc9c43c4" (UID: "c12ae457-80da-4a13-97d6-7fdacc9c43c4"). InnerVolumeSpecName "kube-api-access-h4gnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.129036 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn" (OuterVolumeSpecName: "kube-api-access-xlsvn") pod "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53" (UID: "1cfbd5d7-e5c8-4d9e-9519-705da27c6e53"). InnerVolumeSpecName "kube-api-access-xlsvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209300 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209702 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209712 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12ae457-80da-4a13-97d6-7fdacc9c43c4-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209720 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209734 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4gnt\" (UniqueName: \"kubernetes.io/projected/c12ae457-80da-4a13-97d6-7fdacc9c43c4-kube-api-access-h4gnt\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.209744 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlsvn\" (UniqueName: \"kubernetes.io/projected/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53-kube-api-access-xlsvn\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.387589 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" event={"ID":"1cfbd5d7-e5c8-4d9e-9519-705da27c6e53","Type":"ContainerDied","Data":"e9e7849fdb38afffa7fd9df66dad896eade2cb5fe082b761e1874948fa287a7f"} Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.387618 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cb7995759-v2fd8" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.400451 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" event={"ID":"c12ae457-80da-4a13-97d6-7fdacc9c43c4","Type":"ContainerDied","Data":"a9443b064176971e15468d4e07d8af926458f84f837a71ca5e355cc4d85017c0"} Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.400478 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b749bd587-mccxf" Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.469725 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.486351 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cb7995759-v2fd8"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.523667 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.534305 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b749bd587-mccxf"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.601615 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.620967 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-4j44h"] Oct 03 15:47:32 crc kubenswrapper[5081]: I1003 15:47:32.680474 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.416422 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" event={"ID":"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8","Type":"ContainerStarted","Data":"893fa699b250723b19b55568926a01eb9d14bd2b94f82cc0c59233e4629b3b20"} Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.419180 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16"} Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.420365 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-4j44h" event={"ID":"930b2d10-e4fc-4e90-902a-2cb2068e2fcf","Type":"ContainerStarted","Data":"adf9592aeef114010d56f13b557ba9cd2b077b43920be74586bfafde6fe44d40"} Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.421511 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" event={"ID":"38a398da-6486-4cdc-9ad5-2b67683c1fb5","Type":"ContainerStarted","Data":"58218927027a2d8fdd6483e975c3c4c29164dca96dbf5e24edb356eb9f8c7b0e"} Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.538085 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.571464 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.573108 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.613960 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.739163 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.739227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.739308 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnc2v\" (UniqueName: \"kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.739334 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.739418 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.840886 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cfbd5d7-e5c8-4d9e-9519-705da27c6e53" path="/var/lib/kubelet/pods/1cfbd5d7-e5c8-4d9e-9519-705da27c6e53/volumes" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841067 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnc2v\" (UniqueName: \"kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841107 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841165 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc\") pod 
\"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841186 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841207 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.841355 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c12ae457-80da-4a13-97d6-7fdacc9c43c4" path="/var/lib/kubelet/pods/c12ae457-80da-4a13-97d6-7fdacc9c43c4/volumes" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.842625 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.842919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.843011 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.843331 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.871084 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnc2v\" (UniqueName: \"kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v\") pod \"dnsmasq-dns-59775c759f-xvhlk\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:33 crc kubenswrapper[5081]: I1003 15:47:33.918089 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.422223 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:47:34 crc kubenswrapper[5081]: W1003 15:47:34.425916 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaf72953_f8f2_4223_b8f8_07d58cb5418e.slice/crio-4b576980a85c825b12e191acced7ea9a63a8ba1ea0cad70fae83d168fd528a32 WatchSource:0}: Error finding container 4b576980a85c825b12e191acced7ea9a63a8ba1ea0cad70fae83d168fd528a32: Status 404 returned error can't find the container with id 4b576980a85c825b12e191acced7ea9a63a8ba1ea0cad70fae83d168fd528a32 Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.434281 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerStarted","Data":"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"} Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.661246 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.678998 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.681890 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-z87lw" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.682101 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.684934 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.685118 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.689816 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.783649 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8cws\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.784278 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.784354 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.784439 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.784529 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.886453 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.886533 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8cws\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.886629 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.886692 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.886763 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.887082 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: E1003 15:47:34.887157 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.887213 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: E1003 15:47:34.887229 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.887240 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: E1003 15:47:34.887370 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:47:35.38731842 +0000 UTC m=+1174.352875023 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.910940 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:34 crc kubenswrapper[5081]: I1003 15:47:34.912708 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8cws\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.144695 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-765fs"] Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.146162 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.149863 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.149900 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.151504 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.156925 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-765fs"] Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.294279 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.294362 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.294527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.294771 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.294967 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.295030 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.295145 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh7bx\" (UniqueName: \"kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397192 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397637 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397661 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397737 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397773 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: 
\"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397815 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397846 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.397908 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh7bx\" (UniqueName: \"kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: E1003 15:47:35.398348 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:35 crc kubenswrapper[5081]: E1003 15:47:35.398439 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:35 crc kubenswrapper[5081]: E1003 15:47:35.398577 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:47:36.398534766 +0000 UTC m=+1175.364091369 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.398715 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.402105 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.405434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.405539 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.415311 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.422193 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.423135 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh7bx\" (UniqueName: \"kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx\") pod \"swift-ring-rebalance-765fs\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.470045 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:47:35 crc kubenswrapper[5081]: I1003 15:47:35.472678 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" event={"ID":"aaf72953-f8f2-4223-b8f8-07d58cb5418e","Type":"ContainerStarted","Data":"4b576980a85c825b12e191acced7ea9a63a8ba1ea0cad70fae83d168fd528a32"} Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.032266 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-765fs"] Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.417334 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:36 crc kubenswrapper[5081]: E1003 15:47:36.417551 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:36 crc kubenswrapper[5081]: E1003 15:47:36.417877 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:36 crc kubenswrapper[5081]: E1003 15:47:36.417948 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:47:38.417924309 +0000 UTC m=+1177.383480922 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.494315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerStarted","Data":"a5feaf1cce73df27885a56ebceeea244ff4cd05c4d9e4c7c1fd17a91558166ca"} Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.499787 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e54a24e-4043-40a3-8715-bb461d3f1bde","Type":"ContainerStarted","Data":"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706"} Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.499948 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 15:47:36 crc kubenswrapper[5081]: I1003 15:47:36.554874 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.144959533 podStartE2EDuration="33.554848456s" podCreationTimestamp="2025-10-03 15:47:03 +0000 UTC" firstStartedPulling="2025-10-03 15:47:04.099344442 +0000 UTC m=+1143.064901055" lastFinishedPulling="2025-10-03 15:47:35.509233365 +0000 UTC m=+1174.474789978" observedRunningTime="2025-10-03 15:47:36.546127384 +0000 UTC m=+1175.511684027" watchObservedRunningTime="2025-10-03 15:47:36.554848456 +0000 UTC m=+1175.520405069" Oct 03 15:47:38 crc kubenswrapper[5081]: W1003 15:47:38.368994 5081 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c36939f_a099_4873_aa0e_f0b2215798e4.slice/crio-34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa WatchSource:0}: Error finding container 34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa: Status 404 returned error can't find the container with id 34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa Oct 03 15:47:38 crc kubenswrapper[5081]: I1003 15:47:38.480210 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:38 crc kubenswrapper[5081]: E1003 15:47:38.480748 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:38 crc kubenswrapper[5081]: E1003 15:47:38.480811 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:38 crc kubenswrapper[5081]: E1003 15:47:38.480912 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:47:42.480885893 +0000 UTC m=+1181.446442506 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:38 crc kubenswrapper[5081]: I1003 15:47:38.522306 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-765fs" event={"ID":"8c36939f-a099-4873-aa0e-f0b2215798e4","Type":"ContainerStarted","Data":"34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa"} Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.534360 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerStarted","Data":"e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec"} Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.537325 5081 generic.go:334] "Generic (PLEG): container finished" podID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerID="00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced" exitCode=0 Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.538270 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" event={"ID":"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8","Type":"ContainerDied","Data":"00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced"} Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.542583 5081 generic.go:334] "Generic (PLEG): container finished" podID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerID="0fc2f7cb9c7b4ba27b7ba099eb1843dfaf72c7130cc376ba0fc7aba42bf613c1" exitCode=0 Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.542634 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" event={"ID":"aaf72953-f8f2-4223-b8f8-07d58cb5418e","Type":"ContainerDied","Data":"0fc2f7cb9c7b4ba27b7ba099eb1843dfaf72c7130cc376ba0fc7aba42bf613c1"} Oct 03 15:47:39 crc 
kubenswrapper[5081]: I1003 15:47:39.550197 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerDied","Data":"5334f37d8987ae4171ae3899b8da5f80778f1221e5d6c5d21da758b59468cc2f"} Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.550074 5081 generic.go:334] "Generic (PLEG): container finished" podID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerID="5334f37d8987ae4171ae3899b8da5f80778f1221e5d6c5d21da758b59468cc2f" exitCode=0 Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.553880 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerStarted","Data":"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"} Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.566913 5081 generic.go:334] "Generic (PLEG): container finished" podID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerID="4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320" exitCode=0 Oct 03 15:47:39 crc kubenswrapper[5081]: I1003 15:47:39.566954 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerDied","Data":"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320"} Oct 03 15:47:40 crc kubenswrapper[5081]: E1003 15:47:40.026885 5081 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 03 15:47:40 crc kubenswrapper[5081]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 03 15:47:40 crc kubenswrapper[5081]: > podSandboxID="893fa699b250723b19b55568926a01eb9d14bd2b94f82cc0c59233e4629b3b20" Oct 03 15:47:40 crc kubenswrapper[5081]: E1003 15:47:40.027470 5081 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 03 15:47:40 crc kubenswrapper[5081]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599h5cbh7ch5d4h66fh676hdbh546h95h88h5ffh55ch7fhch57ch687hddhc7h5fdh57dh674h56fh64ch98h9bh557h55dh646h54ch54fh5c4h597q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gw5ng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84d58dc6cf-5wgf8_openstack(88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 03 15:47:40 crc kubenswrapper[5081]: > logger="UnhandledError" Oct 03 15:47:40 crc kubenswrapper[5081]: E1003 15:47:40.028607 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.582488 5081 generic.go:334] "Generic (PLEG): container finished" 
podID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerID="f53a1e2f0c35790a4b2b82250da1a2f59c50900b47cf68bf115effe5e730954c" exitCode=0 Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.582599 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerDied","Data":"f53a1e2f0c35790a4b2b82250da1a2f59c50900b47cf68bf115effe5e730954c"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.588776 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" event={"ID":"aaf72953-f8f2-4223-b8f8-07d58cb5418e","Type":"ContainerStarted","Data":"a756938884cf8ee1b7cd495eb1c954b4895a28a83f195dc625f5ed706a090fd4"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.588912 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.591595 5081 generic.go:334] "Generic (PLEG): container finished" podID="38a398da-6486-4cdc-9ad5-2b67683c1fb5" containerID="749a3c4e930d798513b19e89af8fb3c034c1072ed1f673a9d35ee0f660c90023" exitCode=0 Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.591761 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" event={"ID":"38a398da-6486-4cdc-9ad5-2b67683c1fb5","Type":"ContainerDied","Data":"749a3c4e930d798513b19e89af8fb3c034c1072ed1f673a9d35ee0f660c90023"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.594725 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq" event={"ID":"686d3fe6-8cc9-4013-a5f3-55fe41ac840e","Type":"ContainerStarted","Data":"973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.594857 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-kngsq" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.598495 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerStarted","Data":"34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.602546 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerStarted","Data":"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d"} Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.647542 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=20.33085222 podStartE2EDuration="41.647502444s" podCreationTimestamp="2025-10-03 15:46:59 +0000 UTC" firstStartedPulling="2025-10-03 15:47:01.589328257 +0000 UTC m=+1140.554884870" lastFinishedPulling="2025-10-03 15:47:22.905978491 +0000 UTC m=+1161.871535094" observedRunningTime="2025-10-03 15:47:40.64011069 +0000 UTC m=+1179.605667323" watchObservedRunningTime="2025-10-03 15:47:40.647502444 +0000 UTC m=+1179.613059057" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.690720 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=19.484554691 podStartE2EDuration="41.690703313s" podCreationTimestamp="2025-10-03 15:46:59 +0000 UTC" firstStartedPulling="2025-10-03 
15:47:01.535712677 +0000 UTC m=+1140.501269280" lastFinishedPulling="2025-10-03 15:47:23.741861289 +0000 UTC m=+1162.707417902" observedRunningTime="2025-10-03 15:47:40.686750168 +0000 UTC m=+1179.652306811" watchObservedRunningTime="2025-10-03 15:47:40.690703313 +0000 UTC m=+1179.656259926" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.748810 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" podStartSLOduration=3.211820971 podStartE2EDuration="7.748788491s" podCreationTimestamp="2025-10-03 15:47:33 +0000 UTC" firstStartedPulling="2025-10-03 15:47:34.42888912 +0000 UTC m=+1173.394445733" lastFinishedPulling="2025-10-03 15:47:38.96585664 +0000 UTC m=+1177.931413253" observedRunningTime="2025-10-03 15:47:40.742374936 +0000 UTC m=+1179.707931549" watchObservedRunningTime="2025-10-03 15:47:40.748788491 +0000 UTC m=+1179.714345104" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.874630 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.874722 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.904615 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:40 crc kubenswrapper[5081]: I1003 15:47:40.905027 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:41 crc kubenswrapper[5081]: I1003 15:47:41.866446 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-kngsq" podStartSLOduration=20.674396226 podStartE2EDuration="35.866421903s" podCreationTimestamp="2025-10-03 15:47:06 +0000 UTC" firstStartedPulling="2025-10-03 15:47:23.768836849 +0000 UTC m=+1162.734393462" lastFinishedPulling="2025-10-03 15:47:38.960862526 +0000 UTC m=+1177.926419139" observedRunningTime="2025-10-03 15:47:40.768010107 +0000 UTC m=+1179.733566720" watchObservedRunningTime="2025-10-03 15:47:41.866421903 +0000 UTC m=+1180.831978526" Oct 03 15:47:42 crc kubenswrapper[5081]: I1003 15:47:42.514440 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:42 crc kubenswrapper[5081]: E1003 15:47:42.514751 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:42 crc kubenswrapper[5081]: E1003 15:47:42.514773 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:42 crc kubenswrapper[5081]: E1003 15:47:42.514847 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:47:50.514823164 +0000 UTC m=+1189.480379777 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:43 crc kubenswrapper[5081]: I1003 15:47:43.573478 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.008336 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.146772 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config\") pod \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.146853 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nks5\" (UniqueName: \"kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5\") pod \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.146900 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc\") pod \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.146971 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb\") pod \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\" (UID: \"38a398da-6486-4cdc-9ad5-2b67683c1fb5\") " Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.155399 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5" (OuterVolumeSpecName: "kube-api-access-8nks5") pod "38a398da-6486-4cdc-9ad5-2b67683c1fb5" (UID: "38a398da-6486-4cdc-9ad5-2b67683c1fb5"). InnerVolumeSpecName "kube-api-access-8nks5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.173247 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38a398da-6486-4cdc-9ad5-2b67683c1fb5" (UID: "38a398da-6486-4cdc-9ad5-2b67683c1fb5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.175027 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config" (OuterVolumeSpecName: "config") pod "38a398da-6486-4cdc-9ad5-2b67683c1fb5" (UID: "38a398da-6486-4cdc-9ad5-2b67683c1fb5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.195348 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "38a398da-6486-4cdc-9ad5-2b67683c1fb5" (UID: "38a398da-6486-4cdc-9ad5-2b67683c1fb5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.248907 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.249286 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nks5\" (UniqueName: \"kubernetes.io/projected/38a398da-6486-4cdc-9ad5-2b67683c1fb5-kube-api-access-8nks5\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.249304 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.249317 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a398da-6486-4cdc-9ad5-2b67683c1fb5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.652518 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" event={"ID":"38a398da-6486-4cdc-9ad5-2b67683c1fb5","Type":"ContainerDied","Data":"58218927027a2d8fdd6483e975c3c4c29164dca96dbf5e24edb356eb9f8c7b0e"} Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.652626 5081 scope.go:117] "RemoveContainer" containerID="749a3c4e930d798513b19e89af8fb3c034c1072ed1f673a9d35ee0f660c90023" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.652649 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d8b9bcdd7-lnql9" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.658536 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" event={"ID":"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8","Type":"ContainerStarted","Data":"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534"} Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.659728 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.685081 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" podStartSLOduration=8.493466577 podStartE2EDuration="14.685059999s" podCreationTimestamp="2025-10-03 15:47:30 +0000 UTC" firstStartedPulling="2025-10-03 15:47:32.769593173 +0000 UTC m=+1171.735149786" lastFinishedPulling="2025-10-03 15:47:38.961186595 +0000 UTC m=+1177.926743208" observedRunningTime="2025-10-03 15:47:44.679763526 +0000 UTC m=+1183.645320159" watchObservedRunningTime="2025-10-03 15:47:44.685059999 +0000 UTC m=+1183.650616612" Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.736221 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.750601 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d8b9bcdd7-lnql9"] Oct 03 15:47:44 crc kubenswrapper[5081]: I1003 15:47:44.968445 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.021127 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.669583 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-765fs" event={"ID":"8c36939f-a099-4873-aa0e-f0b2215798e4","Type":"ContainerStarted","Data":"e23f33677c2414ff5f3fb51da521549962794cb0588c742db6be3c0002ff4347"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.674339 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerStarted","Data":"466a0e5a4bc9f929bdfea1ce7776a7ad5217cf962aae9b86e3401192bd5301ea"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.676283 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-4j44h" event={"ID":"930b2d10-e4fc-4e90-902a-2cb2068e2fcf","Type":"ContainerStarted","Data":"e241addceb133533afe94ad230a8d11c2bf695f52e2d891b7ebe29478660b475"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.680018 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerStarted","Data":"a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.680076 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerStarted","Data":"d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.680439 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.687006 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerStarted","Data":"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1"} Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.696243 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-765fs" podStartSLOduration=4.693894632 podStartE2EDuration="10.696198363s" podCreationTimestamp="2025-10-03 15:47:35 +0000 UTC" firstStartedPulling="2025-10-03 15:47:38.425445591 +0000 UTC m=+1177.391002204" lastFinishedPulling="2025-10-03 15:47:44.427749322 +0000 UTC m=+1183.393305935" observedRunningTime="2025-10-03 15:47:45.695296767 +0000 UTC m=+1184.660853390" watchObservedRunningTime="2025-10-03 15:47:45.696198363 +0000 UTC m=+1184.661754986" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.719078 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=16.937964932 podStartE2EDuration="36.719058984s" podCreationTimestamp="2025-10-03 15:47:09 +0000 UTC" firstStartedPulling="2025-10-03 15:47:24.779647034 +0000 UTC m=+1163.745203647" lastFinishedPulling="2025-10-03 15:47:44.560741086 +0000 UTC m=+1183.526297699" observedRunningTime="2025-10-03 15:47:45.71685475 +0000 UTC m=+1184.682411383" watchObservedRunningTime="2025-10-03 15:47:45.719058984 +0000 UTC m=+1184.684615587" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.743855 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-52hh5" podStartSLOduration=25.516517107 podStartE2EDuration="39.74383253s" podCreationTimestamp="2025-10-03 15:47:06 +0000 UTC" firstStartedPulling="2025-10-03 15:47:24.699804467 +0000 UTC m=+1163.665361090" lastFinishedPulling="2025-10-03 15:47:38.92711989 +0000 UTC m=+1177.892676513" observedRunningTime="2025-10-03 15:47:45.739433163 +0000 UTC m=+1184.704989786" watchObservedRunningTime="2025-10-03 15:47:45.74383253 +0000 UTC m=+1184.709389143" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.757872 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-4j44h" podStartSLOduration=3.945489849 podStartE2EDuration="15.757851505s" podCreationTimestamp="2025-10-03 15:47:30 +0000 UTC" firstStartedPulling="2025-10-03 15:47:32.768816591 +0000 UTC m=+1171.734373204" lastFinishedPulling="2025-10-03 15:47:44.581178247 +0000 UTC m=+1183.546734860" observedRunningTime="2025-10-03 15:47:45.753654934 +0000 UTC m=+1184.719211547" watchObservedRunningTime="2025-10-03 15:47:45.757851505 +0000 UTC m=+1184.723408118" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.776138 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=19.084744071 podStartE2EDuration="39.776119783s" podCreationTimestamp="2025-10-03 15:47:06 +0000 UTC" firstStartedPulling="2025-10-03 15:47:23.869364434 +0000 UTC m=+1162.834921047" lastFinishedPulling="2025-10-03 15:47:44.560740146 +0000 UTC m=+1183.526296759" observedRunningTime="2025-10-03 15:47:45.772455937 +0000 UTC m=+1184.738012580" watchObservedRunningTime="2025-10-03 15:47:45.776119783 +0000 UTC m=+1184.741676476" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.783632 5081 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:45 crc kubenswrapper[5081]: I1003 15:47:45.845044 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38a398da-6486-4cdc-9ad5-2b67683c1fb5" path="/var/lib/kubelet/pods/38a398da-6486-4cdc-9ad5-2b67683c1fb5/volumes" Oct 03 15:47:46 crc kubenswrapper[5081]: I1003 15:47:46.698974 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:47:46 crc kubenswrapper[5081]: I1003 15:47:46.746865 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:46 crc kubenswrapper[5081]: I1003 15:47:46.783134 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:46 crc kubenswrapper[5081]: I1003 15:47:46.790641 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:46 crc kubenswrapper[5081]: I1003 15:47:46.832529 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:47 crc kubenswrapper[5081]: I1003 15:47:47.042052 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:47 crc kubenswrapper[5081]: I1003 15:47:47.110270 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 03 15:47:47 crc kubenswrapper[5081]: I1003 15:47:47.707964 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:47 crc kubenswrapper[5081]: I1003 15:47:47.748518 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 03 15:47:47 crc kubenswrapper[5081]: I1003 15:47:47.750718 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.079396 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 03 15:47:48 crc kubenswrapper[5081]: E1003 15:47:48.079900 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38a398da-6486-4cdc-9ad5-2b67683c1fb5" containerName="init" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.079921 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="38a398da-6486-4cdc-9ad5-2b67683c1fb5" containerName="init" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.080110 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="38a398da-6486-4cdc-9ad5-2b67683c1fb5" containerName="init" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.081215 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.092990 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.095083 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.095213 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.095299 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.095503 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-xbvxd" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164239 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d5t7\" (UniqueName: \"kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164386 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164437 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164475 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164667 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.164796 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: 
I1003 15:47:48.266486 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266547 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d5t7\" (UniqueName: \"kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266642 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266680 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266710 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266752 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.266781 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.267385 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.267840 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.267952 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.275696 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.275720 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.280016 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.289863 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d5t7\" (UniqueName: \"kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7\") pod \"ovn-northd-0\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.408497 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.773669 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.919810 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.987151 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:48 crc kubenswrapper[5081]: I1003 15:47:48.987422 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="dnsmasq-dns" containerID="cri-o://a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534" gracePeriod=10 Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.453433 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.490472 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc\") pod \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.490527 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw5ng\" (UniqueName: \"kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng\") pod \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.490654 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb\") pod \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.490682 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config\") pod \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.491477 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb\") pod \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\" (UID: \"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8\") " Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.510804 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng" (OuterVolumeSpecName: "kube-api-access-gw5ng") pod "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" (UID: "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8"). InnerVolumeSpecName "kube-api-access-gw5ng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.535386 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" (UID: "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.540461 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" (UID: "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.543636 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" (UID: "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.552361 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config" (OuterVolumeSpecName: "config") pod "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" (UID: "88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.593547 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.593595 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.593604 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw5ng\" (UniqueName: \"kubernetes.io/projected/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-kube-api-access-gw5ng\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.593615 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.593624 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.723993 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerStarted","Data":"32e7adf5812d09d2318aeea63c100ec4689502bd2bd627508ae592d923d7f65a"} Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.725870 5081 generic.go:334] "Generic (PLEG): container finished" podID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerID="a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534" exitCode=0 Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.725952 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" event={"ID":"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8","Type":"ContainerDied","Data":"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534"} Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.726009 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" event={"ID":"88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8","Type":"ContainerDied","Data":"893fa699b250723b19b55568926a01eb9d14bd2b94f82cc0c59233e4629b3b20"} Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.726028 5081 scope.go:117] "RemoveContainer" containerID="a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.725971 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84d58dc6cf-5wgf8" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.758874 5081 scope.go:117] "RemoveContainer" containerID="00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.761466 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.768095 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84d58dc6cf-5wgf8"] Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.791932 5081 scope.go:117] "RemoveContainer" containerID="a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534" Oct 03 15:47:49 crc kubenswrapper[5081]: E1003 15:47:49.794782 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534\": container with ID starting with a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534 not found: ID does not exist" containerID="a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.794829 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534"} err="failed to get container status \"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534\": rpc error: code = NotFound desc = could not find container \"a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534\": container with ID starting with a3349b4db40c8e7c155df9b8a3098df774ae95181a8df0c509cd9260a9c85534 not found: ID does not exist" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.794879 5081 scope.go:117] "RemoveContainer" containerID="00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced" Oct 03 15:47:49 crc kubenswrapper[5081]: E1003 15:47:49.801783 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced\": container with ID starting with 00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced not found: ID does not exist" containerID="00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.801833 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced"} err="failed to get container status \"00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced\": rpc error: code = NotFound desc = could not find container \"00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced\": container with ID starting with 00b5f6bc36711649e7f68c8a3ff1f1f97de409bcc7bc7b0b500cbb1290dc4ced not found: ID does not exist" Oct 03 15:47:49 crc kubenswrapper[5081]: I1003 15:47:49.838640 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" path="/var/lib/kubelet/pods/88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8/volumes" Oct 03 15:47:50 crc kubenswrapper[5081]: I1003 15:47:50.526671 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod 
\"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:47:50 crc kubenswrapper[5081]: E1003 15:47:50.527012 5081 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 15:47:50 crc kubenswrapper[5081]: E1003 15:47:50.527309 5081 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 15:47:50 crc kubenswrapper[5081]: E1003 15:47:50.527368 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift podName:4791d8d2-2a2a-4595-8678-10ec383956f0 nodeName:}" failed. No retries permitted until 2025-10-03 15:48:06.527351126 +0000 UTC m=+1205.492907739 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift") pod "swift-storage-0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0") : configmap "swift-ring-files" not found Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.384952 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nwrww"] Oct 03 15:47:51 crc kubenswrapper[5081]: E1003 15:47:51.385739 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="init" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.385755 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="init" Oct 03 15:47:51 crc kubenswrapper[5081]: E1003 15:47:51.385774 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="dnsmasq-dns" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.385781 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="dnsmasq-dns" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.385998 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="88b9f3a7-5bfd-4a40-9ed2-70c5a4a8bad8" containerName="dnsmasq-dns" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.386734 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-nwrww" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.394521 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nwrww"] Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.444087 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p8cq\" (UniqueName: \"kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq\") pod \"keystone-db-create-nwrww\" (UID: \"76c27ee0-08c0-41d9-89a6-30d73137b03f\") " pod="openstack/keystone-db-create-nwrww" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.546355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p8cq\" (UniqueName: \"kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq\") pod \"keystone-db-create-nwrww\" (UID: \"76c27ee0-08c0-41d9-89a6-30d73137b03f\") " pod="openstack/keystone-db-create-nwrww" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.579704 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p8cq\" (UniqueName: \"kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq\") pod \"keystone-db-create-nwrww\" (UID: \"76c27ee0-08c0-41d9-89a6-30d73137b03f\") " pod="openstack/keystone-db-create-nwrww" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.590009 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-7nlkd"] Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.592206 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7nlkd" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.616541 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7nlkd"] Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.648661 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pclj8\" (UniqueName: \"kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8\") pod \"placement-db-create-7nlkd\" (UID: \"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9\") " pod="openstack/placement-db-create-7nlkd" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.716207 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-nwrww" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.749936 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pclj8\" (UniqueName: \"kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8\") pod \"placement-db-create-7nlkd\" (UID: \"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9\") " pod="openstack/placement-db-create-7nlkd" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.780844 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pclj8\" (UniqueName: \"kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8\") pod \"placement-db-create-7nlkd\" (UID: \"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9\") " pod="openstack/placement-db-create-7nlkd" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.853064 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-thgzm"] Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.854101 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-thgzm" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.858453 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-thgzm"] Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.954714 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp9x6\" (UniqueName: \"kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6\") pod \"glance-db-create-thgzm\" (UID: \"36ffd821-3040-46b1-b568-35edb1b5dc1d\") " pod="openstack/glance-db-create-thgzm" Oct 03 15:47:51 crc kubenswrapper[5081]: I1003 15:47:51.974951 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7nlkd" Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.057316 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp9x6\" (UniqueName: \"kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6\") pod \"glance-db-create-thgzm\" (UID: \"36ffd821-3040-46b1-b568-35edb1b5dc1d\") " pod="openstack/glance-db-create-thgzm" Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.083999 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp9x6\" (UniqueName: \"kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6\") pod \"glance-db-create-thgzm\" (UID: \"36ffd821-3040-46b1-b568-35edb1b5dc1d\") " pod="openstack/glance-db-create-thgzm" Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.190039 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-thgzm" Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.430505 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nwrww"] Oct 03 15:47:52 crc kubenswrapper[5081]: W1003 15:47:52.442000 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c27ee0_08c0_41d9_89a6_30d73137b03f.slice/crio-0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63 WatchSource:0}: Error finding container 0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63: Status 404 returned error can't find the container with id 0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63 Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.492575 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7nlkd"] Oct 03 15:47:52 crc kubenswrapper[5081]: W1003 15:47:52.506367 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ffdc8e4_e2d1_4bf8_a658_fa37587d07c9.slice/crio-d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd WatchSource:0}: Error finding container d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd: Status 404 returned error can't find the container with id d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.745813 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-thgzm"] Oct 03 15:47:52 crc kubenswrapper[5081]: W1003 15:47:52.751292 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36ffd821_3040_46b1_b568_35edb1b5dc1d.slice/crio-6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e WatchSource:0}: Error finding container 6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e: Status 404 returned error can't find the container with id 6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.756963 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nwrww" event={"ID":"76c27ee0-08c0-41d9-89a6-30d73137b03f","Type":"ContainerStarted","Data":"0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63"} Oct 03 15:47:52 crc kubenswrapper[5081]: I1003 15:47:52.758280 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7nlkd" event={"ID":"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9","Type":"ContainerStarted","Data":"d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd"} Oct 03 15:47:53 crc kubenswrapper[5081]: I1003 15:47:53.767106 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thgzm" event={"ID":"36ffd821-3040-46b1-b568-35edb1b5dc1d","Type":"ContainerStarted","Data":"6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e"} Oct 03 15:47:55 crc kubenswrapper[5081]: I1003 15:47:55.853183 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nwrww" event={"ID":"76c27ee0-08c0-41d9-89a6-30d73137b03f","Type":"ContainerStarted","Data":"190db332aa9ce963771e5f783cab24ab8b498b1573d76b02416354a661063d09"} Oct 03 15:47:55 crc kubenswrapper[5081]: I1003 15:47:55.856003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-create-7nlkd" event={"ID":"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9","Type":"ContainerStarted","Data":"e9fbb481a454be536da4437d1b8adf8cc0b7c83a3173c9f8e7e021d1e234cb4c"} Oct 03 15:47:56 crc kubenswrapper[5081]: I1003 15:47:56.889147 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-nwrww" podStartSLOduration=5.889124766 podStartE2EDuration="5.889124766s" podCreationTimestamp="2025-10-03 15:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:47:56.879652042 +0000 UTC m=+1195.845208695" watchObservedRunningTime="2025-10-03 15:47:56.889124766 +0000 UTC m=+1195.854681389" Oct 03 15:47:57 crc kubenswrapper[5081]: I1003 15:47:57.881156 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thgzm" event={"ID":"36ffd821-3040-46b1-b568-35edb1b5dc1d","Type":"ContainerStarted","Data":"093d59a50f7e6e5e82203965751ceb33aba255b5c54f027b9252d29af23a92c6"} Oct 03 15:47:57 crc kubenswrapper[5081]: I1003 15:47:57.904489 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-7nlkd" podStartSLOduration=6.9044696420000005 podStartE2EDuration="6.904469642s" podCreationTimestamp="2025-10-03 15:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:47:57.898078888 +0000 UTC m=+1196.863635501" watchObservedRunningTime="2025-10-03 15:47:57.904469642 +0000 UTC m=+1196.870026255" Oct 03 15:47:57 crc kubenswrapper[5081]: I1003 15:47:57.921747 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-thgzm" podStartSLOduration=6.921725451 podStartE2EDuration="6.921725451s" podCreationTimestamp="2025-10-03 15:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:47:57.916588373 +0000 UTC m=+1196.882144986" watchObservedRunningTime="2025-10-03 15:47:57.921725451 +0000 UTC m=+1196.887282084" Oct 03 15:47:58 crc kubenswrapper[5081]: E1003 15:47:58.718711 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c27ee0_08c0_41d9_89a6_30d73137b03f.slice/crio-190db332aa9ce963771e5f783cab24ab8b498b1573d76b02416354a661063d09.scope\": RecentStats: unable to find data in memory cache]" Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.888251 5081 generic.go:334] "Generic (PLEG): container finished" podID="76c27ee0-08c0-41d9-89a6-30d73137b03f" containerID="190db332aa9ce963771e5f783cab24ab8b498b1573d76b02416354a661063d09" exitCode=0 Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.888337 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nwrww" event={"ID":"76c27ee0-08c0-41d9-89a6-30d73137b03f","Type":"ContainerDied","Data":"190db332aa9ce963771e5f783cab24ab8b498b1573d76b02416354a661063d09"} Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.890002 5081 generic.go:334] "Generic (PLEG): container finished" podID="8c36939f-a099-4873-aa0e-f0b2215798e4" containerID="e23f33677c2414ff5f3fb51da521549962794cb0588c742db6be3c0002ff4347" exitCode=0 Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.890070 5081 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/swift-ring-rebalance-765fs" event={"ID":"8c36939f-a099-4873-aa0e-f0b2215798e4","Type":"ContainerDied","Data":"e23f33677c2414ff5f3fb51da521549962794cb0588c742db6be3c0002ff4347"} Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.891676 5081 generic.go:334] "Generic (PLEG): container finished" podID="36ffd821-3040-46b1-b568-35edb1b5dc1d" containerID="093d59a50f7e6e5e82203965751ceb33aba255b5c54f027b9252d29af23a92c6" exitCode=0 Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.891720 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thgzm" event={"ID":"36ffd821-3040-46b1-b568-35edb1b5dc1d","Type":"ContainerDied","Data":"093d59a50f7e6e5e82203965751ceb33aba255b5c54f027b9252d29af23a92c6"} Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.893108 5081 generic.go:334] "Generic (PLEG): container finished" podID="7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" containerID="e9fbb481a454be536da4437d1b8adf8cc0b7c83a3173c9f8e7e021d1e234cb4c" exitCode=0 Oct 03 15:47:58 crc kubenswrapper[5081]: I1003 15:47:58.893144 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7nlkd" event={"ID":"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9","Type":"ContainerDied","Data":"e9fbb481a454be536da4437d1b8adf8cc0b7c83a3173c9f8e7e021d1e234cb4c"} Oct 03 15:47:59 crc kubenswrapper[5081]: I1003 15:47:59.913678 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerStarted","Data":"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71"} Oct 03 15:47:59 crc kubenswrapper[5081]: I1003 15:47:59.914048 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerStarted","Data":"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04"} Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.323942 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7nlkd" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.349779 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.546475988 podStartE2EDuration="12.349762417s" podCreationTimestamp="2025-10-03 15:47:48 +0000 UTC" firstStartedPulling="2025-10-03 15:47:48.783748891 +0000 UTC m=+1187.749305504" lastFinishedPulling="2025-10-03 15:47:58.58703532 +0000 UTC m=+1197.552591933" observedRunningTime="2025-10-03 15:47:59.93327656 +0000 UTC m=+1198.898833203" watchObservedRunningTime="2025-10-03 15:48:00.349762417 +0000 UTC m=+1199.315319030" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.437918 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-thgzm" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.445953 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.449449 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pclj8\" (UniqueName: \"kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8\") pod \"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9\" (UID: \"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.461207 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nwrww" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.461204 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8" (OuterVolumeSpecName: "kube-api-access-pclj8") pod "7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" (UID: "7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9"). InnerVolumeSpecName "kube-api-access-pclj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551304 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh7bx\" (UniqueName: \"kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551400 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551467 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551486 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551518 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551550 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551692 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp9x6\" (UniqueName: \"kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6\") pod \"36ffd821-3040-46b1-b568-35edb1b5dc1d\" (UID: \"36ffd821-3040-46b1-b568-35edb1b5dc1d\") 
" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.551719 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle\") pod \"8c36939f-a099-4873-aa0e-f0b2215798e4\" (UID: \"8c36939f-a099-4873-aa0e-f0b2215798e4\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.552193 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pclj8\" (UniqueName: \"kubernetes.io/projected/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9-kube-api-access-pclj8\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.552506 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.552914 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.555509 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx" (OuterVolumeSpecName: "kube-api-access-nh7bx") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "kube-api-access-nh7bx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.556187 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6" (OuterVolumeSpecName: "kube-api-access-mp9x6") pod "36ffd821-3040-46b1-b568-35edb1b5dc1d" (UID: "36ffd821-3040-46b1-b568-35edb1b5dc1d"). InnerVolumeSpecName "kube-api-access-mp9x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.557279 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.572364 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts" (OuterVolumeSpecName: "scripts") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.575714 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.585369 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8c36939f-a099-4873-aa0e-f0b2215798e4" (UID: "8c36939f-a099-4873-aa0e-f0b2215798e4"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653119 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p8cq\" (UniqueName: \"kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq\") pod \"76c27ee0-08c0-41d9-89a6-30d73137b03f\" (UID: \"76c27ee0-08c0-41d9-89a6-30d73137b03f\") " Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653448 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp9x6\" (UniqueName: \"kubernetes.io/projected/36ffd821-3040-46b1-b568-35edb1b5dc1d-kube-api-access-mp9x6\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653470 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653481 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh7bx\" (UniqueName: \"kubernetes.io/projected/8c36939f-a099-4873-aa0e-f0b2215798e4-kube-api-access-nh7bx\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653494 5081 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8c36939f-a099-4873-aa0e-f0b2215798e4-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653507 5081 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653517 5081 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653528 5081 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8c36939f-a099-4873-aa0e-f0b2215798e4-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.653541 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c36939f-a099-4873-aa0e-f0b2215798e4-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.656614 5081 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq" (OuterVolumeSpecName: "kube-api-access-5p8cq") pod "76c27ee0-08c0-41d9-89a6-30d73137b03f" (UID: "76c27ee0-08c0-41d9-89a6-30d73137b03f"). InnerVolumeSpecName "kube-api-access-5p8cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.757702 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p8cq\" (UniqueName: \"kubernetes.io/projected/76c27ee0-08c0-41d9-89a6-30d73137b03f-kube-api-access-5p8cq\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.925038 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thgzm" event={"ID":"36ffd821-3040-46b1-b568-35edb1b5dc1d","Type":"ContainerDied","Data":"6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e"} Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.925084 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c26a1722aa4cccf074ba45d760808311e7226838697a7bb05208a20ac85295e" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.925188 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-thgzm" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.932771 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7nlkd" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.932788 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7nlkd" event={"ID":"7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9","Type":"ContainerDied","Data":"d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd"} Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.932872 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d128a92d452bd9985425be0a6535dd22e5f57963de73e6e8a54b42fd3a0a6dcd" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.935590 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nwrww" event={"ID":"76c27ee0-08c0-41d9-89a6-30d73137b03f","Type":"ContainerDied","Data":"0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63"} Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.935633 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0efff4e21ad15bb487dc625c4848ea84f8223bcfd8d8fed2c3daaa9e892a2a63" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.935714 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-nwrww" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.944533 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-765fs" event={"ID":"8c36939f-a099-4873-aa0e-f0b2215798e4","Type":"ContainerDied","Data":"34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa"} Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.944592 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34a0267978520df72cb23df3b2e910bc1394f9263b4c5b28da6c029813edefaa" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.944698 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 03 15:48:00 crc kubenswrapper[5081]: I1003 15:48:00.944778 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-765fs" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.992714 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-0aea-account-create-p852j"] Oct 03 15:48:01 crc kubenswrapper[5081]: E1003 15:48:01.993946 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ffd821-3040-46b1-b568-35edb1b5dc1d" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994035 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ffd821-3040-46b1-b568-35edb1b5dc1d" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: E1003 15:48:01.994115 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c27ee0-08c0-41d9-89a6-30d73137b03f" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994209 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c27ee0-08c0-41d9-89a6-30d73137b03f" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: E1003 15:48:01.994281 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c36939f-a099-4873-aa0e-f0b2215798e4" containerName="swift-ring-rebalance" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994349 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c36939f-a099-4873-aa0e-f0b2215798e4" containerName="swift-ring-rebalance" Oct 03 15:48:01 crc kubenswrapper[5081]: E1003 15:48:01.994459 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994535 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994851 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.994949 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c36939f-a099-4873-aa0e-f0b2215798e4" containerName="swift-ring-rebalance" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.995024 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ffd821-3040-46b1-b568-35edb1b5dc1d" containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.995099 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c27ee0-08c0-41d9-89a6-30d73137b03f" 
containerName="mariadb-database-create" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.995753 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:01 crc kubenswrapper[5081]: I1003 15:48:01.998079 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.001520 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0aea-account-create-p852j"] Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.186265 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgf8m\" (UniqueName: \"kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m\") pod \"glance-0aea-account-create-p852j\" (UID: \"692f094b-09a6-4e73-820f-5b23de67a2de\") " pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.287492 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgf8m\" (UniqueName: \"kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m\") pod \"glance-0aea-account-create-p852j\" (UID: \"692f094b-09a6-4e73-820f-5b23de67a2de\") " pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.305860 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgf8m\" (UniqueName: \"kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m\") pod \"glance-0aea-account-create-p852j\" (UID: \"692f094b-09a6-4e73-820f-5b23de67a2de\") " pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.314392 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.758170 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0aea-account-create-p852j"] Oct 03 15:48:02 crc kubenswrapper[5081]: W1003 15:48:02.768242 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod692f094b_09a6_4e73_820f_5b23de67a2de.slice/crio-70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e WatchSource:0}: Error finding container 70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e: Status 404 returned error can't find the container with id 70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.965271 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0aea-account-create-p852j" event={"ID":"692f094b-09a6-4e73-820f-5b23de67a2de","Type":"ContainerStarted","Data":"f0dfbbe4c789c475a56039d3e5f56d642c183a192ba50df8037c1df1832128b4"} Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.965793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0aea-account-create-p852j" event={"ID":"692f094b-09a6-4e73-820f-5b23de67a2de","Type":"ContainerStarted","Data":"70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e"} Oct 03 15:48:02 crc kubenswrapper[5081]: I1003 15:48:02.987882 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-0aea-account-create-p852j" podStartSLOduration=1.987854004 podStartE2EDuration="1.987854004s" podCreationTimestamp="2025-10-03 15:48:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:02.978788122 +0000 UTC m=+1201.944344735" watchObservedRunningTime="2025-10-03 15:48:02.987854004 +0000 UTC m=+1201.953410617" Oct 03 15:48:03 crc kubenswrapper[5081]: I1003 15:48:03.978303 5081 generic.go:334] "Generic (PLEG): container finished" podID="692f094b-09a6-4e73-820f-5b23de67a2de" containerID="f0dfbbe4c789c475a56039d3e5f56d642c183a192ba50df8037c1df1832128b4" exitCode=0 Oct 03 15:48:03 crc kubenswrapper[5081]: I1003 15:48:03.978430 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0aea-account-create-p852j" event={"ID":"692f094b-09a6-4e73-820f-5b23de67a2de","Type":"ContainerDied","Data":"f0dfbbe4c789c475a56039d3e5f56d642c183a192ba50df8037c1df1832128b4"} Oct 03 15:48:04 crc kubenswrapper[5081]: I1003 15:48:04.346818 5081 scope.go:117] "RemoveContainer" containerID="e658db7e8ef91c374c137c0299742a6edcb40b6ead470467dd2476cec2a28bd7" Oct 03 15:48:04 crc kubenswrapper[5081]: I1003 15:48:04.371507 5081 scope.go:117] "RemoveContainer" containerID="4a8bce6b92fd618fd9044f54fee341b51a19082ddfc8cf724475deb62413436b" Oct 03 15:48:04 crc kubenswrapper[5081]: I1003 15:48:04.402479 5081 scope.go:117] "RemoveContainer" containerID="08ac70bb6f4f09bf71b76919ab435a25a5dca989a51851caaed2491f81d19626" Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.274915 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.339591 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgf8m\" (UniqueName: \"kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m\") pod \"692f094b-09a6-4e73-820f-5b23de67a2de\" (UID: \"692f094b-09a6-4e73-820f-5b23de67a2de\") " Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.348223 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m" (OuterVolumeSpecName: "kube-api-access-zgf8m") pod "692f094b-09a6-4e73-820f-5b23de67a2de" (UID: "692f094b-09a6-4e73-820f-5b23de67a2de"). InnerVolumeSpecName "kube-api-access-zgf8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.441611 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgf8m\" (UniqueName: \"kubernetes.io/projected/692f094b-09a6-4e73-820f-5b23de67a2de-kube-api-access-zgf8m\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.995753 5081 generic.go:334] "Generic (PLEG): container finished" podID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerID="02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05" exitCode=0 Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.995986 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerDied","Data":"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"} Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.998338 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0aea-account-create-p852j" event={"ID":"692f094b-09a6-4e73-820f-5b23de67a2de","Type":"ContainerDied","Data":"70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e"} Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.998383 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70139a3a7dfc7fc3dcd599d4b3b1cb5c7368f528a9b3e8aeb6cb3b629c60f51e" Oct 03 15:48:05 crc kubenswrapper[5081]: I1003 15:48:05.998412 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0aea-account-create-p852j" Oct 03 15:48:06 crc kubenswrapper[5081]: I1003 15:48:06.561447 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:48:06 crc kubenswrapper[5081]: I1003 15:48:06.571579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"swift-storage-0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " pod="openstack/swift-storage-0" Oct 03 15:48:06 crc kubenswrapper[5081]: I1003 15:48:06.811979 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.037963 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerStarted","Data":"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"} Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.040360 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.064884 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371965.78991 podStartE2EDuration="1m11.06486555s" podCreationTimestamp="2025-10-03 15:46:56 +0000 UTC" firstStartedPulling="2025-10-03 15:46:58.707460623 +0000 UTC m=+1137.673017236" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:07.06070145 +0000 UTC m=+1206.026258073" watchObservedRunningTime="2025-10-03 15:48:07.06486555 +0000 UTC m=+1206.030422163" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.115739 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-g95hn"] Oct 03 15:48:07 crc kubenswrapper[5081]: E1003 15:48:07.116173 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692f094b-09a6-4e73-820f-5b23de67a2de" containerName="mariadb-account-create" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.116198 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="692f094b-09a6-4e73-820f-5b23de67a2de" containerName="mariadb-account-create" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.116398 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="692f094b-09a6-4e73-820f-5b23de67a2de" containerName="mariadb-account-create" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.117132 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.119227 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.120304 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xbxgf" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.130598 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-g95hn"] Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.274929 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.274999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.275045 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz7wq\" (UniqueName: \"kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.275081 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.328217 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.379456 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.379517 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz7wq\" (UniqueName: \"kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.379574 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.379672 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.384385 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.384830 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.385062 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.403233 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz7wq\" (UniqueName: \"kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq\") pod \"glance-db-sync-g95hn\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.437921 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:07 crc kubenswrapper[5081]: I1003 15:48:07.749547 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-g95hn"] Oct 03 15:48:07 crc kubenswrapper[5081]: W1003 15:48:07.782419 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81d22c4d_1624_46de_80d5_6b366cd99bf8.slice/crio-97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9 WatchSource:0}: Error finding container 97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9: Status 404 returned error can't find the container with id 97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9 Oct 03 15:48:08 crc kubenswrapper[5081]: I1003 15:48:08.047318 5081 generic.go:334] "Generic (PLEG): container finished" podID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerID="a5feaf1cce73df27885a56ebceeea244ff4cd05c4d9e4c7c1fd17a91558166ca" exitCode=0 Oct 03 15:48:08 crc kubenswrapper[5081]: I1003 15:48:08.047642 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerDied","Data":"a5feaf1cce73df27885a56ebceeea244ff4cd05c4d9e4c7c1fd17a91558166ca"} Oct 03 15:48:08 crc kubenswrapper[5081]: I1003 15:48:08.050197 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-g95hn" event={"ID":"81d22c4d-1624-46de-80d5-6b366cd99bf8","Type":"ContainerStarted","Data":"97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9"} Oct 03 15:48:08 crc kubenswrapper[5081]: I1003 15:48:08.051615 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"ab677a205221fcaeb13f4f7b06fda4f0ca2c646591db3e721c18127142d790a9"} Oct 03 15:48:08 crc kubenswrapper[5081]: I1003 15:48:08.478334 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 03 15:48:09 crc kubenswrapper[5081]: I1003 15:48:09.067883 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerStarted","Data":"b01bf452cb29e0baf0468dc3879379123d94b893f16e315da708a69a521d9ace"} Oct 03 15:48:09 crc kubenswrapper[5081]: I1003 15:48:09.068401 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:48:09 crc kubenswrapper[5081]: I1003 15:48:09.090451 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=48.394662438 podStartE2EDuration="1m12.090435385s" podCreationTimestamp="2025-10-03 15:46:57 +0000 UTC" firstStartedPulling="2025-10-03 15:46:59.209708479 +0000 UTC m=+1138.175265092" lastFinishedPulling="2025-10-03 15:47:22.905481426 +0000 UTC m=+1161.871038039" observedRunningTime="2025-10-03 15:48:09.090409194 +0000 UTC m=+1208.055965817" watchObservedRunningTime="2025-10-03 15:48:09.090435385 +0000 UTC m=+1208.055991988" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.378480 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-106f-account-create-frpqk"] Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.380722 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.385477 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.391050 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-106f-account-create-frpqk"] Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.556527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfbz8\" (UniqueName: \"kubernetes.io/projected/89176eaf-8985-42d1-a4b1-8fbc918beb38-kube-api-access-hfbz8\") pod \"keystone-106f-account-create-frpqk\" (UID: \"89176eaf-8985-42d1-a4b1-8fbc918beb38\") " pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.659499 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfbz8\" (UniqueName: \"kubernetes.io/projected/89176eaf-8985-42d1-a4b1-8fbc918beb38-kube-api-access-hfbz8\") pod \"keystone-106f-account-create-frpqk\" (UID: \"89176eaf-8985-42d1-a4b1-8fbc918beb38\") " pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.675805 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9594-account-create-4wdgs"] Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.676917 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.680237 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.690310 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9594-account-create-4wdgs"] Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.693413 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfbz8\" (UniqueName: \"kubernetes.io/projected/89176eaf-8985-42d1-a4b1-8fbc918beb38-kube-api-access-hfbz8\") pod \"keystone-106f-account-create-frpqk\" (UID: \"89176eaf-8985-42d1-a4b1-8fbc918beb38\") " pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.704962 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.761267 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkfp7\" (UniqueName: \"kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7\") pod \"placement-9594-account-create-4wdgs\" (UID: \"828c409e-c464-4024-8da4-3262e96be555\") " pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.864814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkfp7\" (UniqueName: \"kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7\") pod \"placement-9594-account-create-4wdgs\" (UID: \"828c409e-c464-4024-8da4-3262e96be555\") " pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:11 crc kubenswrapper[5081]: I1003 15:48:11.882183 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkfp7\" (UniqueName: \"kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7\") pod \"placement-9594-account-create-4wdgs\" (UID: \"828c409e-c464-4024-8da4-3262e96be555\") " pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:12 crc kubenswrapper[5081]: I1003 15:48:12.027358 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:12 crc kubenswrapper[5081]: I1003 15:48:12.262097 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" probeResult="failure" output=< Oct 03 15:48:12 crc kubenswrapper[5081]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 15:48:12 crc kubenswrapper[5081]: > Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.196842 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-106f-account-create-frpqk"] Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.247571 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" probeResult="failure" output=< Oct 03 15:48:17 crc kubenswrapper[5081]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 15:48:17 crc kubenswrapper[5081]: > Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.262489 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9594-account-create-4wdgs"] Oct 03 15:48:17 crc kubenswrapper[5081]: W1003 15:48:17.267258 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89176eaf_8985_42d1_a4b1_8fbc918beb38.slice/crio-eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625 WatchSource:0}: Error finding container eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625: Status 404 returned error can't find the container with id eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625 Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.279448 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.323624 5081 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.628636 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kngsq-config-t2b8k"] Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.629812 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.632679 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.669694 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kngsq-config-t2b8k"] Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.768812 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.768859 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.768901 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.768946 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.769229 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.769263 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fllgf\" (UniqueName: \"kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870535 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn\") pod 
\"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870665 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fllgf\" (UniqueName: \"kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870800 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870826 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.870861 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.871061 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.871074 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.871076 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.871669 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts\") pod 
\"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.872654 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.896174 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fllgf\" (UniqueName: \"kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf\") pod \"ovn-controller-kngsq-config-t2b8k\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:17 crc kubenswrapper[5081]: I1003 15:48:17.966947 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:18 crc kubenswrapper[5081]: I1003 15:48:18.138897 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-106f-account-create-frpqk" event={"ID":"89176eaf-8985-42d1-a4b1-8fbc918beb38","Type":"ContainerStarted","Data":"eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625"} Oct 03 15:48:18 crc kubenswrapper[5081]: I1003 15:48:18.401252 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Oct 03 15:48:18 crc kubenswrapper[5081]: I1003 15:48:18.662736 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused" Oct 03 15:48:22 crc kubenswrapper[5081]: I1003 15:48:22.248978 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" probeResult="failure" output=< Oct 03 15:48:22 crc kubenswrapper[5081]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 15:48:22 crc kubenswrapper[5081]: > Oct 03 15:48:25 crc kubenswrapper[5081]: I1003 15:48:25.201089 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9594-account-create-4wdgs" event={"ID":"828c409e-c464-4024-8da4-3262e96be555","Type":"ContainerStarted","Data":"840fa6024a9873e5388fecb54e7df050264d8daed013bea0b5facc11bc6786ce"} Oct 03 15:48:25 crc kubenswrapper[5081]: I1003 15:48:25.656123 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kngsq-config-t2b8k"] Oct 03 15:48:25 crc kubenswrapper[5081]: W1003 15:48:25.710381 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bbac405_a863_4bc4_9eef_a0bc39eba650.slice/crio-d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e WatchSource:0}: Error finding container d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e: Status 404 returned error can't find the container with id d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e Oct 03 15:48:26 crc 
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.213846 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"9fb9ff61254c258e053e39687cfe871e46e9e37bc3923e11a92f9ba4e6d36e54"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.214246 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"6183bf8da2a80f5b9e9698fac4e3f60d821b5c52084e202cff3ec20d564ffd21"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.214261 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"21dea586b40c33dcc79f88530d74f5e7fcd590c00c4174c95c9b987e02a408cb"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.214276 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"2dc48262e0ade0cd5b46c732df33fce4d98185362e199780f79f0145f57aa828"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.216006 5081 generic.go:334] "Generic (PLEG): container finished" podID="89176eaf-8985-42d1-a4b1-8fbc918beb38" containerID="3234f43ec5e5008c4467212fab4fb164ae01868c28d804e4bf4b3e6bb71a6336" exitCode=0
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.216072 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-106f-account-create-frpqk" event={"ID":"89176eaf-8985-42d1-a4b1-8fbc918beb38","Type":"ContainerDied","Data":"3234f43ec5e5008c4467212fab4fb164ae01868c28d804e4bf4b3e6bb71a6336"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.217897 5081 generic.go:334] "Generic (PLEG): container finished" podID="828c409e-c464-4024-8da4-3262e96be555" containerID="094774f80b389d20b60c03d34539966beac21be622dcb0337f5f4ccb2984989b" exitCode=0
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.217984 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9594-account-create-4wdgs" event={"ID":"828c409e-c464-4024-8da4-3262e96be555","Type":"ContainerDied","Data":"094774f80b389d20b60c03d34539966beac21be622dcb0337f5f4ccb2984989b"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.219982 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq-config-t2b8k" event={"ID":"1bbac405-a863-4bc4-9eef-a0bc39eba650","Type":"ContainerStarted","Data":"e4534c113168e11113fe38107a8f500f4417dbbb179d3846aa9fce87a53a18d2"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.220019 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq-config-t2b8k" event={"ID":"1bbac405-a863-4bc4-9eef-a0bc39eba650","Type":"ContainerStarted","Data":"d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.222861 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-g95hn" event={"ID":"81d22c4d-1624-46de-80d5-6b366cd99bf8","Type":"ContainerStarted","Data":"84123b425f08e68c1e7ab4696972c2e9b88d67c960a2818e57a2224785f30145"}
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.276325 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-g95hn" podStartSLOduration=1.771923829 podStartE2EDuration="19.276306658s" podCreationTimestamp="2025-10-03 15:48:07 +0000 UTC" firstStartedPulling="2025-10-03 15:48:07.784759887 +0000 UTC m=+1206.750316500" lastFinishedPulling="2025-10-03 15:48:25.289142716 +0000 UTC m=+1224.254699329" observedRunningTime="2025-10-03 15:48:26.260633115 +0000 UTC m=+1225.226189728" watchObservedRunningTime="2025-10-03 15:48:26.276306658 +0000 UTC m=+1225.241863271"
Oct 03 15:48:26 crc kubenswrapper[5081]: I1003 15:48:26.291210 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-kngsq-config-t2b8k" podStartSLOduration=9.291187508 podStartE2EDuration="9.291187508s" podCreationTimestamp="2025-10-03 15:48:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:26.287727078 +0000 UTC m=+1225.253283691" watchObservedRunningTime="2025-10-03 15:48:26.291187508 +0000 UTC m=+1225.256744121"
Oct 03 15:48:27 crc kubenswrapper[5081]: I1003 15:48:27.250654 5081 generic.go:334] "Generic (PLEG): container finished" podID="1bbac405-a863-4bc4-9eef-a0bc39eba650" containerID="e4534c113168e11113fe38107a8f500f4417dbbb179d3846aa9fce87a53a18d2" exitCode=0
Oct 03 15:48:27 crc kubenswrapper[5081]: I1003 15:48:27.251235 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq-config-t2b8k" event={"ID":"1bbac405-a863-4bc4-9eef-a0bc39eba650","Type":"ContainerDied","Data":"e4534c113168e11113fe38107a8f500f4417dbbb179d3846aa9fce87a53a18d2"}
Oct 03 15:48:27 crc kubenswrapper[5081]: I1003 15:48:27.251325 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-kngsq"
Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.685987 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-106f-account-create-frpqk"
Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.791764 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9594-account-create-4wdgs"
Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.855262 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfbz8\" (UniqueName: \"kubernetes.io/projected/89176eaf-8985-42d1-a4b1-8fbc918beb38-kube-api-access-hfbz8\") pod \"89176eaf-8985-42d1-a4b1-8fbc918beb38\" (UID: \"89176eaf-8985-42d1-a4b1-8fbc918beb38\") "
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.956588 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkfp7\" (UniqueName: \"kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7\") pod \"828c409e-c464-4024-8da4-3262e96be555\" (UID: \"828c409e-c464-4024-8da4-3262e96be555\") " Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.957204 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfbz8\" (UniqueName: \"kubernetes.io/projected/89176eaf-8985-42d1-a4b1-8fbc918beb38-kube-api-access-hfbz8\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:27.961928 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7" (OuterVolumeSpecName: "kube-api-access-vkfp7") pod "828c409e-c464-4024-8da4-3262e96be555" (UID: "828c409e-c464-4024-8da4-3262e96be555"). InnerVolumeSpecName "kube-api-access-vkfp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.058644 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkfp7\" (UniqueName: \"kubernetes.io/projected/828c409e-c464-4024-8da4-3262e96be555-kube-api-access-vkfp7\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.260867 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"d0329f49fa7dc846b20dc08a8389809ac26059dd282cb5b7a946f6475f240c48"} Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.260908 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"45463607f3525f8e75aa01b96f684ea8a4d207f0bdd044315c3bcfc0933d9b65"} Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.260919 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"676eedca602ba56315b71044aa07d745f875330d2c6a9a252a84c0c20469a5b3"} Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.261985 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-106f-account-create-frpqk" event={"ID":"89176eaf-8985-42d1-a4b1-8fbc918beb38","Type":"ContainerDied","Data":"eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625"} Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.262003 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb9ff47d2506b49a26a1dab563a66e58c2165a832fe51dca491d06ed76d90625" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.262052 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-106f-account-create-frpqk" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.271639 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9594-account-create-4wdgs" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.272090 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9594-account-create-4wdgs" event={"ID":"828c409e-c464-4024-8da4-3262e96be555","Type":"ContainerDied","Data":"840fa6024a9873e5388fecb54e7df050264d8daed013bea0b5facc11bc6786ce"} Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.272159 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="840fa6024a9873e5388fecb54e7df050264d8daed013bea0b5facc11bc6786ce" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.402651 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.669400 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.780679 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-9288s"] Oct 03 15:48:28 crc kubenswrapper[5081]: E1003 15:48:28.781060 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="828c409e-c464-4024-8da4-3262e96be555" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.781072 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="828c409e-c464-4024-8da4-3262e96be555" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: E1003 15:48:28.781092 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89176eaf-8985-42d1-a4b1-8fbc918beb38" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.781098 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="89176eaf-8985-42d1-a4b1-8fbc918beb38" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.781288 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="828c409e-c464-4024-8da4-3262e96be555" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.781317 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="89176eaf-8985-42d1-a4b1-8fbc918beb38" containerName="mariadb-account-create" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.781915 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9288s" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.830349 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9288s"] Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.892262 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-x8jsz"] Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.893935 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.894862 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfc5v\" (UniqueName: \"kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v\") pod \"cinder-db-create-9288s\" (UID: \"83a84d35-e4fa-4026-b4e0-21bf2b354b0f\") " pod="openstack/cinder-db-create-9288s" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.914373 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-x8jsz"] Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.973629 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mll7n"] Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.974641 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.987377 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mll7n"] Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.991122 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.996606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfc5v\" (UniqueName: \"kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v\") pod \"cinder-db-create-9288s\" (UID: \"83a84d35-e4fa-4026-b4e0-21bf2b354b0f\") " pod="openstack/cinder-db-create-9288s" Oct 03 15:48:28 crc kubenswrapper[5081]: I1003 15:48:28.996795 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shsdc\" (UniqueName: \"kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc\") pod \"barbican-db-create-x8jsz\" (UID: \"1498b040-f5c3-404a-aced-26e9b82387c9\") " pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.024131 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfc5v\" (UniqueName: \"kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v\") pod \"cinder-db-create-9288s\" (UID: \"83a84d35-e4fa-4026-b4e0-21bf2b354b0f\") " pod="openstack/cinder-db-create-9288s" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098101 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098156 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fllgf\" (UniqueName: \"kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098223 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: 
\"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098279 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098363 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098401 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts\") pod \"1bbac405-a863-4bc4-9eef-a0bc39eba650\" (UID: \"1bbac405-a863-4bc4-9eef-a0bc39eba650\") " Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098609 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shsdc\" (UniqueName: \"kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc\") pod \"barbican-db-create-x8jsz\" (UID: \"1498b040-f5c3-404a-aced-26e9b82387c9\") " pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098708 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6v98\" (UniqueName: \"kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98\") pod \"neutron-db-create-mll7n\" (UID: \"b82c10f0-5e99-44c9-807d-e34c63b187e9\") " pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098798 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.098817 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.099169 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.099519 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts" (OuterVolumeSpecName: "scripts") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.099663 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run" (OuterVolumeSpecName: "var-run") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.117291 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf" (OuterVolumeSpecName: "kube-api-access-fllgf") pod "1bbac405-a863-4bc4-9eef-a0bc39eba650" (UID: "1bbac405-a863-4bc4-9eef-a0bc39eba650"). InnerVolumeSpecName "kube-api-access-fllgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.117543 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9288s" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.124670 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shsdc\" (UniqueName: \"kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc\") pod \"barbican-db-create-x8jsz\" (UID: \"1498b040-f5c3-404a-aced-26e9b82387c9\") " pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.207592 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6v98\" (UniqueName: \"kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98\") pod \"neutron-db-create-mll7n\" (UID: \"b82c10f0-5e99-44c9-807d-e34c63b187e9\") " pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.210935 5081 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.210964 5081 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.211082 5081 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.211092 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fllgf\" (UniqueName: \"kubernetes.io/projected/1bbac405-a863-4bc4-9eef-a0bc39eba650-kube-api-access-fllgf\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.211101 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/1bbac405-a863-4bc4-9eef-a0bc39eba650-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.211111 5081 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bbac405-a863-4bc4-9eef-a0bc39eba650-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.231130 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6v98\" (UniqueName: \"kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98\") pod \"neutron-db-create-mll7n\" (UID: \"b82c10f0-5e99-44c9-807d-e34c63b187e9\") " pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.289897 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"ae79f147bd8fd93a896550501e3a9434ca704c15d3e99a1d98595472b5b0f638"} Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.291498 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq-config-t2b8k" event={"ID":"1bbac405-a863-4bc4-9eef-a0bc39eba650","Type":"ContainerDied","Data":"d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e"} Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.291542 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d168ca087da69f3ba9543a4da9a280d19cc1dbc4d498a8ee7bc9b4f9b603548e" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.291628 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq-config-t2b8k" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.306931 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.318865 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:29 crc kubenswrapper[5081]: E1003 15:48:29.440804 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bbac405_a863_4bc4_9eef_a0bc39eba650.slice\": RecentStats: unable to find data in memory cache]" Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.612538 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9288s"] Oct 03 15:48:29 crc kubenswrapper[5081]: W1003 15:48:29.831277 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1498b040_f5c3_404a_aced_26e9b82387c9.slice/crio-625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528 WatchSource:0}: Error finding container 625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528: Status 404 returned error can't find the container with id 625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528 Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.838386 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-x8jsz"] Oct 03 15:48:29 crc kubenswrapper[5081]: I1003 15:48:29.905743 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mll7n"] Oct 03 15:48:29 crc kubenswrapper[5081]: W1003 15:48:29.915798 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb82c10f0_5e99_44c9_807d_e34c63b187e9.slice/crio-e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311 WatchSource:0}: Error finding container e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311: Status 404 returned error can't find the container with id e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311 Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.103240 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kngsq-config-t2b8k"] Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.109769 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kngsq-config-t2b8k"] Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.329168 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x8jsz" event={"ID":"1498b040-f5c3-404a-aced-26e9b82387c9","Type":"ContainerStarted","Data":"19c786c1e99f549fa8a5118fc3a61426531da6442d4e1c883048e996969dd30a"} Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.329542 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x8jsz" event={"ID":"1498b040-f5c3-404a-aced-26e9b82387c9","Type":"ContainerStarted","Data":"625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528"} Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.339041 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mll7n" event={"ID":"b82c10f0-5e99-44c9-807d-e34c63b187e9","Type":"ContainerStarted","Data":"8607971b18cfb9e06089269e699a5ef2143d8a8931505756f195f8bf0fd0715f"} Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.339090 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mll7n" event={"ID":"b82c10f0-5e99-44c9-807d-e34c63b187e9","Type":"ContainerStarted","Data":"e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311"} Oct 03 
15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.351621 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9288s" event={"ID":"83a84d35-e4fa-4026-b4e0-21bf2b354b0f","Type":"ContainerStarted","Data":"37c70bdce53077b7f4007fd4f9b0dd5d2ebb6fc02349ab8a99dcef4b5e632492"} Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.351671 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9288s" event={"ID":"83a84d35-e4fa-4026-b4e0-21bf2b354b0f","Type":"ContainerStarted","Data":"0eeca48c3a36f961e715bf4c2825a24165bfcc2a8c65065a536c2f78691aa96e"} Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.359445 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-x8jsz" podStartSLOduration=2.359428075 podStartE2EDuration="2.359428075s" podCreationTimestamp="2025-10-03 15:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:30.350694252 +0000 UTC m=+1229.316250875" watchObservedRunningTime="2025-10-03 15:48:30.359428075 +0000 UTC m=+1229.324984688" Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.406756 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-9288s" podStartSLOduration=2.406737074 podStartE2EDuration="2.406737074s" podCreationTimestamp="2025-10-03 15:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:30.406483497 +0000 UTC m=+1229.372040110" watchObservedRunningTime="2025-10-03 15:48:30.406737074 +0000 UTC m=+1229.372293687" Oct 03 15:48:30 crc kubenswrapper[5081]: I1003 15:48:30.407401 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-mll7n" podStartSLOduration=2.407392083 podStartE2EDuration="2.407392083s" podCreationTimestamp="2025-10-03 15:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:30.380410402 +0000 UTC m=+1229.345967035" watchObservedRunningTime="2025-10-03 15:48:30.407392083 +0000 UTC m=+1229.372948696" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.360030 5081 generic.go:334] "Generic (PLEG): container finished" podID="b82c10f0-5e99-44c9-807d-e34c63b187e9" containerID="8607971b18cfb9e06089269e699a5ef2143d8a8931505756f195f8bf0fd0715f" exitCode=0 Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.360410 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mll7n" event={"ID":"b82c10f0-5e99-44c9-807d-e34c63b187e9","Type":"ContainerDied","Data":"8607971b18cfb9e06089269e699a5ef2143d8a8931505756f195f8bf0fd0715f"} Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.365035 5081 generic.go:334] "Generic (PLEG): container finished" podID="83a84d35-e4fa-4026-b4e0-21bf2b354b0f" containerID="37c70bdce53077b7f4007fd4f9b0dd5d2ebb6fc02349ab8a99dcef4b5e632492" exitCode=0 Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.365155 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9288s" event={"ID":"83a84d35-e4fa-4026-b4e0-21bf2b354b0f","Type":"ContainerDied","Data":"37c70bdce53077b7f4007fd4f9b0dd5d2ebb6fc02349ab8a99dcef4b5e632492"} Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.368016 5081 generic.go:334] "Generic (PLEG): container finished" 
podID="1498b040-f5c3-404a-aced-26e9b82387c9" containerID="19c786c1e99f549fa8a5118fc3a61426531da6442d4e1c883048e996969dd30a" exitCode=0 Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.368097 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x8jsz" event={"ID":"1498b040-f5c3-404a-aced-26e9b82387c9","Type":"ContainerDied","Data":"19c786c1e99f549fa8a5118fc3a61426531da6442d4e1c883048e996969dd30a"} Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.384826 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"962a1e3fd606faa70fe55c161c25398c016ac0969ef92d4e88b58a60f3ef02eb"} Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.384877 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"2d265cc1788a8ffd41d868c45e20c3c29c12f51c4e066b3d0b0c81546645bcfa"} Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.871835 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bbac405-a863-4bc4-9eef-a0bc39eba650" path="/var/lib/kubelet/pods/1bbac405-a863-4bc4-9eef-a0bc39eba650/volumes" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.981446 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-rd97m"] Oct 03 15:48:31 crc kubenswrapper[5081]: E1003 15:48:31.981905 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bbac405-a863-4bc4-9eef-a0bc39eba650" containerName="ovn-config" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.981928 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bbac405-a863-4bc4-9eef-a0bc39eba650" containerName="ovn-config" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.982172 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bbac405-a863-4bc4-9eef-a0bc39eba650" containerName="ovn-config" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.982886 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.986257 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.986405 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mjd9n" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.986455 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.986880 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 15:48:31 crc kubenswrapper[5081]: I1003 15:48:31.990593 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-rd97m"] Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.163758 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.163839 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df2lb\" (UniqueName: \"kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.163899 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.265336 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.265766 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df2lb\" (UniqueName: \"kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.265817 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.271962 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " 
pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.272233 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.295809 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df2lb\" (UniqueName: \"kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb\") pod \"keystone-db-sync-rd97m\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:32 crc kubenswrapper[5081]: I1003 15:48:32.300818 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.413144 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"fc2a1a4f0df9739d588ce081aaaa43ab9cfe57521cacaf41f5e2e169875cad7b"} Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.793960 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.809546 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9288s" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.817881 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.886637 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-rd97m"] Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.895959 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shsdc\" (UniqueName: \"kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc\") pod \"1498b040-f5c3-404a-aced-26e9b82387c9\" (UID: \"1498b040-f5c3-404a-aced-26e9b82387c9\") " Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.896069 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfc5v\" (UniqueName: \"kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v\") pod \"83a84d35-e4fa-4026-b4e0-21bf2b354b0f\" (UID: \"83a84d35-e4fa-4026-b4e0-21bf2b354b0f\") " Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.903717 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc" (OuterVolumeSpecName: "kube-api-access-shsdc") pod "1498b040-f5c3-404a-aced-26e9b82387c9" (UID: "1498b040-f5c3-404a-aced-26e9b82387c9"). InnerVolumeSpecName "kube-api-access-shsdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.903823 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v" (OuterVolumeSpecName: "kube-api-access-zfc5v") pod "83a84d35-e4fa-4026-b4e0-21bf2b354b0f" (UID: "83a84d35-e4fa-4026-b4e0-21bf2b354b0f"). 
InnerVolumeSpecName "kube-api-access-zfc5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.998258 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6v98\" (UniqueName: \"kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98\") pod \"b82c10f0-5e99-44c9-807d-e34c63b187e9\" (UID: \"b82c10f0-5e99-44c9-807d-e34c63b187e9\") " Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.998864 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shsdc\" (UniqueName: \"kubernetes.io/projected/1498b040-f5c3-404a-aced-26e9b82387c9-kube-api-access-shsdc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:33 crc kubenswrapper[5081]: I1003 15:48:33.998891 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfc5v\" (UniqueName: \"kubernetes.io/projected/83a84d35-e4fa-4026-b4e0-21bf2b354b0f-kube-api-access-zfc5v\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.002956 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98" (OuterVolumeSpecName: "kube-api-access-h6v98") pod "b82c10f0-5e99-44c9-807d-e34c63b187e9" (UID: "b82c10f0-5e99-44c9-807d-e34c63b187e9"). InnerVolumeSpecName "kube-api-access-h6v98". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.100524 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6v98\" (UniqueName: \"kubernetes.io/projected/b82c10f0-5e99-44c9-807d-e34c63b187e9-kube-api-access-h6v98\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.421745 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mll7n" event={"ID":"b82c10f0-5e99-44c9-807d-e34c63b187e9","Type":"ContainerDied","Data":"e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311"} Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.422084 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2dcf217667fd57d73595b9c0e4c8eb37b87a07114a1700e25523f2f7c3e4311" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.422166 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mll7n" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.424580 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9288s" event={"ID":"83a84d35-e4fa-4026-b4e0-21bf2b354b0f","Type":"ContainerDied","Data":"0eeca48c3a36f961e715bf4c2825a24165bfcc2a8c65065a536c2f78691aa96e"} Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.424620 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eeca48c3a36f961e715bf4c2825a24165bfcc2a8c65065a536c2f78691aa96e" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.424669 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-9288s" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.425950 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x8jsz" event={"ID":"1498b040-f5c3-404a-aced-26e9b82387c9","Type":"ContainerDied","Data":"625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528"} Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.425971 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="625a636e0e9d65b72eed2c9ff4275af850d0c844dd4f3a4205267aa4af6f5528" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.425958 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x8jsz" Oct 03 15:48:34 crc kubenswrapper[5081]: I1003 15:48:34.426825 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rd97m" event={"ID":"68de214d-52d3-4916-9c7f-720fc3ebc0f5","Type":"ContainerStarted","Data":"6a58213e9f9bdfe02d39bd2029c38fdbf3e0a7e1df8fa4f17543a174118897e3"} Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.445737 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"00b89755a18526b56352f35b1330f853e78ba6ee6b50eb49214837e6f9797ab9"} Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.445780 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"e80c760b9b37c5a71b037090135c5ba4f32f98d8709aa318d7bf69734c058ea2"} Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.445793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"a7b2d7ca1d510ef79cf6048ee9579f2ac7ff3e40ff9234a031ecf02dfba25777"} Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.445802 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerStarted","Data":"0dfae43d5dfa9c09237aa7cb9e6fcba01b60a0f0e13fdd86961e6469f5f09d3c"} Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.508936 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.898726667 podStartE2EDuration="1m2.5089072s" podCreationTimestamp="2025-10-03 15:47:33 +0000 UTC" firstStartedPulling="2025-10-03 15:48:07.341711462 +0000 UTC m=+1206.307268075" lastFinishedPulling="2025-10-03 15:48:30.951891985 +0000 UTC m=+1229.917448608" observedRunningTime="2025-10-03 15:48:35.503901825 +0000 UTC m=+1234.469458458" watchObservedRunningTime="2025-10-03 15:48:35.5089072 +0000 UTC m=+1234.474463803" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740153 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:35 crc kubenswrapper[5081]: E1003 15:48:35.740611 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83a84d35-e4fa-4026-b4e0-21bf2b354b0f" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740628 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="83a84d35-e4fa-4026-b4e0-21bf2b354b0f" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: E1003 15:48:35.740654 5081 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1498b040-f5c3-404a-aced-26e9b82387c9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740661 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1498b040-f5c3-404a-aced-26e9b82387c9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: E1003 15:48:35.740689 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82c10f0-5e99-44c9-807d-e34c63b187e9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740697 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82c10f0-5e99-44c9-807d-e34c63b187e9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740907 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b82c10f0-5e99-44c9-807d-e34c63b187e9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740923 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1498b040-f5c3-404a-aced-26e9b82387c9" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.740949 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="83a84d35-e4fa-4026-b4e0-21bf2b354b0f" containerName="mariadb-database-create" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.742015 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.744738 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.764435 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.831297 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.831348 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.831370 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.831401 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 
15:48:35.831485 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzkfn\" (UniqueName: \"kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.831539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932754 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932795 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932810 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932827 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932902 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzkfn\" (UniqueName: \"kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.932967 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.933952 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.933952 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.933958 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.934057 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.934616 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:35 crc kubenswrapper[5081]: I1003 15:48:35.951037 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzkfn\" (UniqueName: \"kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn\") pod \"dnsmasq-dns-7dd7d6c7-jt5hv\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:36 crc kubenswrapper[5081]: I1003 15:48:36.062760 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:36 crc kubenswrapper[5081]: I1003 15:48:36.455637 5081 generic.go:334] "Generic (PLEG): container finished" podID="81d22c4d-1624-46de-80d5-6b366cd99bf8" containerID="84123b425f08e68c1e7ab4696972c2e9b88d67c960a2818e57a2224785f30145" exitCode=0 Oct 03 15:48:36 crc kubenswrapper[5081]: I1003 15:48:36.456458 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-g95hn" event={"ID":"81d22c4d-1624-46de-80d5-6b366cd99bf8","Type":"ContainerDied","Data":"84123b425f08e68c1e7ab4696972c2e9b88d67c960a2818e57a2224785f30145"} Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.358989 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.474201 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle\") pod \"81d22c4d-1624-46de-80d5-6b366cd99bf8\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.474240 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data\") pod \"81d22c4d-1624-46de-80d5-6b366cd99bf8\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.474306 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data\") pod \"81d22c4d-1624-46de-80d5-6b366cd99bf8\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.474383 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz7wq\" (UniqueName: \"kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq\") pod \"81d22c4d-1624-46de-80d5-6b366cd99bf8\" (UID: \"81d22c4d-1624-46de-80d5-6b366cd99bf8\") " Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.476964 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-g95hn" event={"ID":"81d22c4d-1624-46de-80d5-6b366cd99bf8","Type":"ContainerDied","Data":"97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9"} Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.477104 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97a7991efef42e622ad8c1677dcb36187c3931337bb409cd5aa52ec473cc40d9" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.477007 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-g95hn" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.478984 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rd97m" event={"ID":"68de214d-52d3-4916-9c7f-720fc3ebc0f5","Type":"ContainerStarted","Data":"3c45fe872da14002f3ab7d1037353971c602d0ceb557b052309704e9c49f8757"} Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.479158 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "81d22c4d-1624-46de-80d5-6b366cd99bf8" (UID: "81d22c4d-1624-46de-80d5-6b366cd99bf8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.479449 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq" (OuterVolumeSpecName: "kube-api-access-tz7wq") pod "81d22c4d-1624-46de-80d5-6b366cd99bf8" (UID: "81d22c4d-1624-46de-80d5-6b366cd99bf8"). InnerVolumeSpecName "kube-api-access-tz7wq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.502256 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-rd97m" podStartSLOduration=3.156046957 podStartE2EDuration="7.502241141s" podCreationTimestamp="2025-10-03 15:48:31 +0000 UTC" firstStartedPulling="2025-10-03 15:48:33.896450193 +0000 UTC m=+1232.862006796" lastFinishedPulling="2025-10-03 15:48:38.242644367 +0000 UTC m=+1237.208200980" observedRunningTime="2025-10-03 15:48:38.496104063 +0000 UTC m=+1237.461660676" watchObservedRunningTime="2025-10-03 15:48:38.502241141 +0000 UTC m=+1237.467797754" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.504407 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81d22c4d-1624-46de-80d5-6b366cd99bf8" (UID: "81d22c4d-1624-46de-80d5-6b366cd99bf8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.528207 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data" (OuterVolumeSpecName: "config-data") pod "81d22c4d-1624-46de-80d5-6b366cd99bf8" (UID: "81d22c4d-1624-46de-80d5-6b366cd99bf8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.564493 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:38 crc kubenswrapper[5081]: W1003 15:48:38.568943 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode726f378_c43b_4e05_9d00_1745cbc38529.slice/crio-38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770 WatchSource:0}: Error finding container 38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770: Status 404 returned error can't find the container with id 38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770 Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.577479 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.577506 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.577516 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/81d22c4d-1624-46de-80d5-6b366cd99bf8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.577524 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz7wq\" (UniqueName: \"kubernetes.io/projected/81d22c4d-1624-46de-80d5-6b366cd99bf8-kube-api-access-tz7wq\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.871706 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 
15:48:38.920374 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:38 crc kubenswrapper[5081]: E1003 15:48:38.920722 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d22c4d-1624-46de-80d5-6b366cd99bf8" containerName="glance-db-sync" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.920738 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d22c4d-1624-46de-80d5-6b366cd99bf8" containerName="glance-db-sync" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.920915 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d22c4d-1624-46de-80d5-6b366cd99bf8" containerName="glance-db-sync" Oct 03 15:48:38 crc kubenswrapper[5081]: I1003 15:48:38.921881 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.032077 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092697 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092760 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092818 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rkn2\" (UniqueName: \"kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092864 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092948 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.092993 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.194903 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.194959 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.195014 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.195035 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.195070 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rkn2\" (UniqueName: \"kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.195106 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.196123 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.196152 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.196253 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.196309 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.196768 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.211786 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rkn2\" (UniqueName: \"kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2\") pod \"dnsmasq-dns-b89bbf557-69dl2\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.237062 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.488775 5081 generic.go:334] "Generic (PLEG): container finished" podID="e726f378-c43b-4e05-9d00-1745cbc38529" containerID="10152ec8a1923757337d738d5118918b130c5fdf29463ff0492ba4a99fc86dd5" exitCode=0 Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.488852 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" event={"ID":"e726f378-c43b-4e05-9d00-1745cbc38529","Type":"ContainerDied","Data":"10152ec8a1923757337d738d5118918b130c5fdf29463ff0492ba4a99fc86dd5"} Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.489159 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" event={"ID":"e726f378-c43b-4e05-9d00-1745cbc38529","Type":"ContainerStarted","Data":"38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770"} Oct 03 15:48:39 crc kubenswrapper[5081]: I1003 15:48:39.667206 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:39 crc kubenswrapper[5081]: E1003 15:48:39.681156 5081 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 03 15:48:39 crc kubenswrapper[5081]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e726f378-c43b-4e05-9d00-1745cbc38529/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 03 15:48:39 crc kubenswrapper[5081]: > podSandboxID="38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770" Oct 03 15:48:39 crc kubenswrapper[5081]: E1003 15:48:39.681382 5081 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 03 15:48:39 crc kubenswrapper[5081]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66h65h58dhd9h5dch579h665h67fh9dh65h588hdfh655h9fh675hbdh85h5b9h77h698h675h5ch56dh5b4h565h66bh64chcfhcdh599h54bh5bfq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hzkfn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7dd7d6c7-jt5hv_openstack(e726f378-c43b-4e05-9d00-1745cbc38529): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e726f378-c43b-4e05-9d00-1745cbc38529/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 03 15:48:39 crc kubenswrapper[5081]: > logger="UnhandledError" Oct 03 15:48:39 crc kubenswrapper[5081]: E1003 15:48:39.682520 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e726f378-c43b-4e05-9d00-1745cbc38529/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" 
podUID="e726f378-c43b-4e05-9d00-1745cbc38529" Oct 03 15:48:39 crc kubenswrapper[5081]: W1003 15:48:39.744224 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98b55fc1_2ebd_4de5_8caa_195c834164ec.slice/crio-95afb93ce7e0312ecf52e3fd034be77142f9255570d0469ae85a5cf4eb2d4955 WatchSource:0}: Error finding container 95afb93ce7e0312ecf52e3fd034be77142f9255570d0469ae85a5cf4eb2d4955: Status 404 returned error can't find the container with id 95afb93ce7e0312ecf52e3fd034be77142f9255570d0469ae85a5cf4eb2d4955 Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.511256 5081 generic.go:334] "Generic (PLEG): container finished" podID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerID="ef7262f156271dea57452d21802dad28bd2f1d08ae87cfd091e98246037f9740" exitCode=0 Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.512724 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" event={"ID":"98b55fc1-2ebd-4de5-8caa-195c834164ec","Type":"ContainerDied","Data":"ef7262f156271dea57452d21802dad28bd2f1d08ae87cfd091e98246037f9740"} Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.512779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" event={"ID":"98b55fc1-2ebd-4de5-8caa-195c834164ec","Type":"ContainerStarted","Data":"95afb93ce7e0312ecf52e3fd034be77142f9255570d0469ae85a5cf4eb2d4955"} Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.826139 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923040 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923091 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzkfn\" (UniqueName: \"kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923121 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923144 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923210 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.923383 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb\") pod \"e726f378-c43b-4e05-9d00-1745cbc38529\" (UID: \"e726f378-c43b-4e05-9d00-1745cbc38529\") " Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.927140 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn" (OuterVolumeSpecName: "kube-api-access-hzkfn") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "kube-api-access-hzkfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.964788 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config" (OuterVolumeSpecName: "config") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.971642 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.972549 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.979127 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:40 crc kubenswrapper[5081]: I1003 15:48:40.981185 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e726f378-c43b-4e05-9d00-1745cbc38529" (UID: "e726f378-c43b-4e05-9d00-1745cbc38529"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025058 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025260 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025314 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzkfn\" (UniqueName: \"kubernetes.io/projected/e726f378-c43b-4e05-9d00-1745cbc38529-kube-api-access-hzkfn\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025391 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025451 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.025501 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e726f378-c43b-4e05-9d00-1745cbc38529-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.524317 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" event={"ID":"98b55fc1-2ebd-4de5-8caa-195c834164ec","Type":"ContainerStarted","Data":"35aad235722d44e28ad9528437573c1d42037591892fe49589a035d8b10f81ec"} Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.524492 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.525652 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" event={"ID":"e726f378-c43b-4e05-9d00-1745cbc38529","Type":"ContainerDied","Data":"38d575f9fe6ee39250711fe674c4c706d519b862d6b270f510e15e5705b0c770"} Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.525700 5081 scope.go:117] "RemoveContainer" containerID="10152ec8a1923757337d738d5118918b130c5fdf29463ff0492ba4a99fc86dd5" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.525746 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7dd7d6c7-jt5hv" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.548094 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" podStartSLOduration=3.548070413 podStartE2EDuration="3.548070413s" podCreationTimestamp="2025-10-03 15:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:41.546643741 +0000 UTC m=+1240.512200354" watchObservedRunningTime="2025-10-03 15:48:41.548070413 +0000 UTC m=+1240.513627026" Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.651399 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.664769 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7dd7d6c7-jt5hv"] Oct 03 15:48:41 crc kubenswrapper[5081]: I1003 15:48:41.849613 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e726f378-c43b-4e05-9d00-1745cbc38529" path="/var/lib/kubelet/pods/e726f378-c43b-4e05-9d00-1745cbc38529/volumes" Oct 03 15:48:43 crc kubenswrapper[5081]: I1003 15:48:43.546309 5081 generic.go:334] "Generic (PLEG): container finished" podID="68de214d-52d3-4916-9c7f-720fc3ebc0f5" containerID="3c45fe872da14002f3ab7d1037353971c602d0ceb557b052309704e9c49f8757" exitCode=0 Oct 03 15:48:43 crc kubenswrapper[5081]: I1003 15:48:43.546445 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rd97m" event={"ID":"68de214d-52d3-4916-9c7f-720fc3ebc0f5","Type":"ContainerDied","Data":"3c45fe872da14002f3ab7d1037353971c602d0ceb557b052309704e9c49f8757"} Oct 03 15:48:44 crc kubenswrapper[5081]: I1003 15:48:44.904865 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:44 crc kubenswrapper[5081]: I1003 15:48:44.998245 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle\") pod \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " Oct 03 15:48:44 crc kubenswrapper[5081]: I1003 15:48:44.998377 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df2lb\" (UniqueName: \"kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb\") pod \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " Oct 03 15:48:44 crc kubenswrapper[5081]: I1003 15:48:44.998541 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data\") pod \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\" (UID: \"68de214d-52d3-4916-9c7f-720fc3ebc0f5\") " Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.007908 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb" (OuterVolumeSpecName: "kube-api-access-df2lb") pod "68de214d-52d3-4916-9c7f-720fc3ebc0f5" (UID: "68de214d-52d3-4916-9c7f-720fc3ebc0f5"). InnerVolumeSpecName "kube-api-access-df2lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.029601 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68de214d-52d3-4916-9c7f-720fc3ebc0f5" (UID: "68de214d-52d3-4916-9c7f-720fc3ebc0f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.052885 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data" (OuterVolumeSpecName: "config-data") pod "68de214d-52d3-4916-9c7f-720fc3ebc0f5" (UID: "68de214d-52d3-4916-9c7f-720fc3ebc0f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.100048 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.100090 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df2lb\" (UniqueName: \"kubernetes.io/projected/68de214d-52d3-4916-9c7f-720fc3ebc0f5-kube-api-access-df2lb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.100109 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68de214d-52d3-4916-9c7f-720fc3ebc0f5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.562888 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rd97m" event={"ID":"68de214d-52d3-4916-9c7f-720fc3ebc0f5","Type":"ContainerDied","Data":"6a58213e9f9bdfe02d39bd2029c38fdbf3e0a7e1df8fa4f17543a174118897e3"} Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.562932 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a58213e9f9bdfe02d39bd2029c38fdbf3e0a7e1df8fa4f17543a174118897e3" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.562997 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-rd97m" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.802541 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.803092 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="dnsmasq-dns" containerID="cri-o://35aad235722d44e28ad9528437573c1d42037591892fe49589a035d8b10f81ec" gracePeriod=10 Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.807716 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.857499 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kj7wf"] Oct 03 15:48:45 crc kubenswrapper[5081]: E1003 15:48:45.857763 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e726f378-c43b-4e05-9d00-1745cbc38529" containerName="init" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.857774 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e726f378-c43b-4e05-9d00-1745cbc38529" containerName="init" Oct 03 15:48:45 crc kubenswrapper[5081]: E1003 15:48:45.857790 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68de214d-52d3-4916-9c7f-720fc3ebc0f5" containerName="keystone-db-sync" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.857797 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="68de214d-52d3-4916-9c7f-720fc3ebc0f5" containerName="keystone-db-sync" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.857939 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="68de214d-52d3-4916-9c7f-720fc3ebc0f5" containerName="keystone-db-sync" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.857953 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e726f378-c43b-4e05-9d00-1745cbc38529" containerName="init" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.858412 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.859910 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.860364 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.862219 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.862824 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.863421 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mjd9n" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.868309 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.882429 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kj7wf"] Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.910312 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916593 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916668 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhc2b\" (UniqueName: \"kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916707 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916797 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916860 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916890 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916922 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j82ps\" (UniqueName: \"kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.916980 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.917020 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.917092 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:45 crc kubenswrapper[5081]: I1003 15:48:45.917132 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018189 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018252 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhc2b\" (UniqueName: \"kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018320 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018347 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018364 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j82ps\" (UniqueName: \"kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018681 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018704 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018745 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.018820 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.019691 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.021256 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.021964 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.024473 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.025089 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.035549 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.041408 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.061196 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.061499 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.071094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " 
pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.085373 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhc2b\" (UniqueName: \"kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b\") pod \"keystone-bootstrap-kj7wf\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.097655 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j82ps\" (UniqueName: \"kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps\") pod \"dnsmasq-dns-6cb5579769-n8c8l\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.126113 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.133031 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.137847 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.138104 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.149095 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.183976 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.220033 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-9759p"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.221209 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.225739 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vcqdl" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.225968 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.226082 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227042 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227108 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227139 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227164 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227216 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkjms\" (UniqueName: \"kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227240 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.227271 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.245139 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.258684 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-9759p"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 
15:48:46.263779 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.282485 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.284928 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.309254 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331711 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331765 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331791 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331812 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331833 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331860 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331881 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331907 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb\") pod 
\"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331931 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnwqz\" (UniqueName: \"kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331950 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkjms\" (UniqueName: \"kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331975 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.331999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332020 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khrvc\" (UniqueName: \"kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332040 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332062 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332085 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332102 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " 
pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.332116 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.336500 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.336638 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.337063 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.337362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.342448 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.342525 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.373369 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkjms\" (UniqueName: \"kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms\") pod \"ceilometer-0\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.434827 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.434956 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data\") pod \"placement-db-sync-9759p\" (UID: 
\"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.434989 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435027 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435065 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnwqz\" (UniqueName: \"kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435114 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435144 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khrvc\" (UniqueName: \"kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435218 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435254 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435280 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435309 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc 
kubenswrapper[5081]: I1003 15:48:46.435880 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.435970 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.436511 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.436974 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.437308 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.440278 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.440524 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.441092 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.442251 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.453253 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khrvc\" (UniqueName: 
\"kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc\") pod \"placement-db-sync-9759p\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.455398 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnwqz\" (UniqueName: \"kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz\") pod \"dnsmasq-dns-55b4c6976c-nbgt8\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.471260 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.619009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-9759p" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.647394 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.656740 5081 generic.go:334] "Generic (PLEG): container finished" podID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerID="35aad235722d44e28ad9528437573c1d42037591892fe49589a035d8b10f81ec" exitCode=0 Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.656785 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" event={"ID":"98b55fc1-2ebd-4de5-8caa-195c834164ec","Type":"ContainerDied","Data":"35aad235722d44e28ad9528437573c1d42037591892fe49589a035d8b10f81ec"} Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.775091 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:46 crc kubenswrapper[5081]: W1003 15:48:46.785301 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c601e67_8652_432d_8f9f_0d9b0b108c87.slice/crio-15d484217065b70c66d657972eaa24e40eb4494263b0e603f66e622409b63f9d WatchSource:0}: Error finding container 15d484217065b70c66d657972eaa24e40eb4494263b0e603f66e622409b63f9d: Status 404 returned error can't find the container with id 15d484217065b70c66d657972eaa24e40eb4494263b0e603f66e622409b63f9d Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.915943 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kj7wf"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.985508 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.987249 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.991596 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.991620 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.991685 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xbxgf" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.991915 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 15:48:46 crc kubenswrapper[5081]: I1003 15:48:46.998412 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056356 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056395 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056420 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbjtr\" (UniqueName: \"kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056487 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056513 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056549 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.056618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.102980 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.104767 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.107892 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.116459 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.117341 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.120725 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160670 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160704 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160728 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160794 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfj4x\" (UniqueName: \"kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x\") pod \"glance-default-internal-api-0\" (UID: 
\"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160826 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160853 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160875 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160900 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160933 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbjtr\" (UniqueName: \"kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.160989 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161016 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161077 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161104 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161139 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.161168 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.163603 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.164727 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.170314 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.172779 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.173187 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.173457 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc 
kubenswrapper[5081]: I1003 15:48:47.187140 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbjtr\" (UniqueName: \"kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.244309 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262396 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262522 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262582 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262628 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rkn2\" (UniqueName: \"kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262682 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262754 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb\") pod \"98b55fc1-2ebd-4de5-8caa-195c834164ec\" (UID: \"98b55fc1-2ebd-4de5-8caa-195c834164ec\") " Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.262990 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263040 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263057 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263100 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263149 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263234 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfj4x\" (UniqueName: \"kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263277 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.263300 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.266775 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.266776 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.268453 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.270310 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.296537 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.302233 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.306500 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfj4x\" (UniqueName: \"kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.306623 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.312940 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2" (OuterVolumeSpecName: "kube-api-access-7rkn2") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "kube-api-access-7rkn2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: W1003 15:48:47.327193 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5431832_6fed_4beb_94b5_f2c1c4917d49.slice/crio-5a49898093a6b654d7e4bc7c225a759f030411537077d385bf5892d16cb9e569 WatchSource:0}: Error finding container 5a49898093a6b654d7e4bc7c225a759f030411537077d385bf5892d16cb9e569: Status 404 returned error can't find the container with id 5a49898093a6b654d7e4bc7c225a759f030411537077d385bf5892d16cb9e569 Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.331890 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-9759p"] Oct 03 15:48:47 crc kubenswrapper[5081]: W1003 15:48:47.363956 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdc78530_6030_49c0_8e3e_ae1b26430a90.slice/crio-18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391 WatchSource:0}: Error finding container 18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391: Status 404 returned error can't find the container with id 18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391 Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.366406 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rkn2\" (UniqueName: \"kubernetes.io/projected/98b55fc1-2ebd-4de5-8caa-195c834164ec-kube-api-access-7rkn2\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.407510 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.438286 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.452938 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.459854 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.463302 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.463414 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.467684 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.482404 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.491660 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.510054 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.523850 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config" (OuterVolumeSpecName: "config") pod "98b55fc1-2ebd-4de5-8caa-195c834164ec" (UID: "98b55fc1-2ebd-4de5-8caa-195c834164ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.569469 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.569814 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.569960 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.569984 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98b55fc1-2ebd-4de5-8caa-195c834164ec-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.666937 5081 generic.go:334] "Generic (PLEG): container finished" podID="3c601e67-8652-432d-8f9f-0d9b0b108c87" containerID="7a5a1dae925768c5010b9320425b5b4e68ae58d4b6e7d12e189cad0c8cd70143" exitCode=0 Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.667009 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" event={"ID":"3c601e67-8652-432d-8f9f-0d9b0b108c87","Type":"ContainerDied","Data":"7a5a1dae925768c5010b9320425b5b4e68ae58d4b6e7d12e189cad0c8cd70143"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.667038 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" event={"ID":"3c601e67-8652-432d-8f9f-0d9b0b108c87","Type":"ContainerStarted","Data":"15d484217065b70c66d657972eaa24e40eb4494263b0e603f66e622409b63f9d"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.668310 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9759p" event={"ID":"cdc78530-6030-49c0-8e3e-ae1b26430a90","Type":"ContainerStarted","Data":"18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.671427 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" event={"ID":"98b55fc1-2ebd-4de5-8caa-195c834164ec","Type":"ContainerDied","Data":"95afb93ce7e0312ecf52e3fd034be77142f9255570d0469ae85a5cf4eb2d4955"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.671464 5081 scope.go:117] "RemoveContainer" containerID="35aad235722d44e28ad9528437573c1d42037591892fe49589a035d8b10f81ec" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.671607 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b89bbf557-69dl2" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.684028 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kj7wf" event={"ID":"d64f8e16-5199-499b-82ad-e0289139400c","Type":"ContainerStarted","Data":"48a09e9ca64de36546e072756a97d761a030cb3e742c290692500230d25e59fd"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.684064 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kj7wf" event={"ID":"d64f8e16-5199-499b-82ad-e0289139400c","Type":"ContainerStarted","Data":"3e5a306f6be72f56e1d6ddbf3990b7a01698f4d4dd682eeb492a50efa4c9d560"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.689952 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerStarted","Data":"5a49898093a6b654d7e4bc7c225a759f030411537077d385bf5892d16cb9e569"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.704967 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" event={"ID":"d39b47d6-5c20-46d7-8a31-65605a26ceb3","Type":"ContainerStarted","Data":"8066050777aa2f1f7d5dbde0aafb6b883a758f0b0546e4ce68a5a3f99ee21c91"} Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.706209 5081 scope.go:117] "RemoveContainer" containerID="ef7262f156271dea57452d21802dad28bd2f1d08ae87cfd091e98246037f9740" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.727508 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kj7wf" podStartSLOduration=2.727486684 podStartE2EDuration="2.727486684s" podCreationTimestamp="2025-10-03 15:48:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:47.709878355 +0000 UTC m=+1246.675434968" watchObservedRunningTime="2025-10-03 15:48:47.727486684 +0000 UTC m=+1246.693043297" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.743021 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.757908 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b89bbf557-69dl2"] Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.845686 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" path="/var/lib/kubelet/pods/98b55fc1-2ebd-4de5-8caa-195c834164ec/volumes" Oct 03 15:48:47 crc kubenswrapper[5081]: I1003 15:48:47.878268 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.021631 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.088949 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.106035 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.217485 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.296295 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.485238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.485300 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j82ps\" (UniqueName: \"kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.486143 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.486198 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.486301 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.486322 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc\") pod \"3c601e67-8652-432d-8f9f-0d9b0b108c87\" (UID: \"3c601e67-8652-432d-8f9f-0d9b0b108c87\") " Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.495926 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps" (OuterVolumeSpecName: "kube-api-access-j82ps") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "kube-api-access-j82ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.520478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.521686 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.525024 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.525230 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config" (OuterVolumeSpecName: "config") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.529852 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c601e67-8652-432d-8f9f-0d9b0b108c87" (UID: "3c601e67-8652-432d-8f9f-0d9b0b108c87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588043 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588072 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588081 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588091 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j82ps\" (UniqueName: \"kubernetes.io/projected/3c601e67-8652-432d-8f9f-0d9b0b108c87-kube-api-access-j82ps\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588101 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.588109 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c601e67-8652-432d-8f9f-0d9b0b108c87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.754378 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6ede-account-create-fxvgw"] Oct 03 15:48:48 crc kubenswrapper[5081]: E1003 15:48:48.755160 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c601e67-8652-432d-8f9f-0d9b0b108c87" containerName="init" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.755502 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c601e67-8652-432d-8f9f-0d9b0b108c87" containerName="init" Oct 03 15:48:48 crc kubenswrapper[5081]: E1003 15:48:48.755536 5081 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="dnsmasq-dns" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.755546 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="dnsmasq-dns" Oct 03 15:48:48 crc kubenswrapper[5081]: E1003 15:48:48.755578 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="init" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.755588 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="init" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.755858 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c601e67-8652-432d-8f9f-0d9b0b108c87" containerName="init" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.755910 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="98b55fc1-2ebd-4de5-8caa-195c834164ec" containerName="dnsmasq-dns" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.756654 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.759897 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.767536 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6ede-account-create-fxvgw"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.792168 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjldp\" (UniqueName: \"kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp\") pod \"barbican-6ede-account-create-fxvgw\" (UID: \"090ece03-2db4-4ab8-9ae0-a18b750bdc8b\") " pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.809006 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerStarted","Data":"3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0"} Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.809055 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerStarted","Data":"93fd9385db220935f0cab1605fe15582a73e0d6c483914fc9ab73000f6f09381"} Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.812801 5081 generic.go:334] "Generic (PLEG): container finished" podID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerID="e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f" exitCode=0 Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.812854 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" event={"ID":"d39b47d6-5c20-46d7-8a31-65605a26ceb3","Type":"ContainerDied","Data":"e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f"} Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.818330 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" 
event={"ID":"3c601e67-8652-432d-8f9f-0d9b0b108c87","Type":"ContainerDied","Data":"15d484217065b70c66d657972eaa24e40eb4494263b0e603f66e622409b63f9d"} Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.818398 5081 scope.go:117] "RemoveContainer" containerID="7a5a1dae925768c5010b9320425b5b4e68ae58d4b6e7d12e189cad0c8cd70143" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.818415 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb5579769-n8c8l" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.848184 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerStarted","Data":"b55e937b95dcb2e054753c74b155f8fc1f5520d58f99b751e2c5dd7df986a0bd"} Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.849703 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-edee-account-create-6jgz9"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.851866 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.855722 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.884718 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-edee-account-create-6jgz9"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.899201 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-866gc\" (UniqueName: \"kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc\") pod \"cinder-edee-account-create-6jgz9\" (UID: \"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d\") " pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.900250 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjldp\" (UniqueName: \"kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp\") pod \"barbican-6ede-account-create-fxvgw\" (UID: \"090ece03-2db4-4ab8-9ae0-a18b750bdc8b\") " pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.912516 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.916241 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjldp\" (UniqueName: \"kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp\") pod \"barbican-6ede-account-create-fxvgw\" (UID: \"090ece03-2db4-4ab8-9ae0-a18b750bdc8b\") " pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:48 crc kubenswrapper[5081]: I1003 15:48:48.921343 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cb5579769-n8c8l"] Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.006622 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-866gc\" (UniqueName: \"kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc\") pod \"cinder-edee-account-create-6jgz9\" (UID: \"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d\") " pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 
15:48:49.039192 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-866gc\" (UniqueName: \"kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc\") pod \"cinder-edee-account-create-6jgz9\" (UID: \"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d\") " pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.045269 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-4548-account-create-pphwt"] Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.046910 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.050790 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.062350 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4548-account-create-pphwt"] Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.081091 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.109184 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlghv\" (UniqueName: \"kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv\") pod \"neutron-4548-account-create-pphwt\" (UID: \"322d9ed5-f62f-4984-b44c-013901c00c9a\") " pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.192590 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.211165 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlghv\" (UniqueName: \"kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv\") pod \"neutron-4548-account-create-pphwt\" (UID: \"322d9ed5-f62f-4984-b44c-013901c00c9a\") " pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.249780 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlghv\" (UniqueName: \"kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv\") pod \"neutron-4548-account-create-pphwt\" (UID: \"322d9ed5-f62f-4984-b44c-013901c00c9a\") " pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.495521 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.640000 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6ede-account-create-fxvgw"] Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.753726 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-edee-account-create-6jgz9"] Oct 03 15:48:49 crc kubenswrapper[5081]: W1003 15:48:49.779679 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea8e8f30_c5b5_4fc5_a692_75b234dfe04d.slice/crio-4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd WatchSource:0}: Error finding container 4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd: Status 404 returned error can't find the container with id 4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.845529 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c601e67-8652-432d-8f9f-0d9b0b108c87" path="/var/lib/kubelet/pods/3c601e67-8652-432d-8f9f-0d9b0b108c87/volumes" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.888101 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerStarted","Data":"537ba3f14b34bfae9ac860071a470308327689fbf78e26c13a8a50303151c52a"} Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.893458 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6ede-account-create-fxvgw" event={"ID":"090ece03-2db4-4ab8-9ae0-a18b750bdc8b","Type":"ContainerStarted","Data":"869ede9535f17c1ba5990107f05c318aa5c3f4e8b7a2acdb05211c7de0bb2e67"} Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.895124 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edee-account-create-6jgz9" event={"ID":"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d","Type":"ContainerStarted","Data":"4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd"} Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.903604 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerStarted","Data":"6146b16ad5ab0a04856fd52b2dd2b0069ad92afb5a9882ef8774915960a6f925"} Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.903660 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-log" containerID="cri-o://3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0" gracePeriod=30 Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.903731 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-httpd" containerID="cri-o://6146b16ad5ab0a04856fd52b2dd2b0069ad92afb5a9882ef8774915960a6f925" gracePeriod=30 Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.909201 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" event={"ID":"d39b47d6-5c20-46d7-8a31-65605a26ceb3","Type":"ContainerStarted","Data":"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c"} Oct 03 15:48:49 crc kubenswrapper[5081]: 
I1003 15:48:49.910018 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:49 crc kubenswrapper[5081]: I1003 15:48:49.930761 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.930739904 podStartE2EDuration="3.930739904s" podCreationTimestamp="2025-10-03 15:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:49.922036492 +0000 UTC m=+1248.887593105" watchObservedRunningTime="2025-10-03 15:48:49.930739904 +0000 UTC m=+1248.896296517" Oct 03 15:48:49 crc kubenswrapper[5081]: E1003 15:48:49.945506 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4186afd8_0940_41b6_91b1_522b7ba15413.slice/crio-3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0.scope\": RecentStats: unable to find data in memory cache]" Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.061646 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" podStartSLOduration=4.061623973 podStartE2EDuration="4.061623973s" podCreationTimestamp="2025-10-03 15:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:49.947172489 +0000 UTC m=+1248.912729122" watchObservedRunningTime="2025-10-03 15:48:50.061623973 +0000 UTC m=+1249.027180586" Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.064004 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-4548-account-create-pphwt"] Oct 03 15:48:50 crc kubenswrapper[5081]: W1003 15:48:50.084494 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod322d9ed5_f62f_4984_b44c_013901c00c9a.slice/crio-b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144 WatchSource:0}: Error finding container b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144: Status 404 returned error can't find the container with id b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.927685 5081 generic.go:334] "Generic (PLEG): container finished" podID="322d9ed5-f62f-4984-b44c-013901c00c9a" containerID="d4899cf45c8e57fa395d11224d614fa21d3883f4753e27ab3a856ddce378f669" exitCode=0 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.928092 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4548-account-create-pphwt" event={"ID":"322d9ed5-f62f-4984-b44c-013901c00c9a","Type":"ContainerDied","Data":"d4899cf45c8e57fa395d11224d614fa21d3883f4753e27ab3a856ddce378f669"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.928127 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4548-account-create-pphwt" event={"ID":"322d9ed5-f62f-4984-b44c-013901c00c9a","Type":"ContainerStarted","Data":"b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.937505 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerStarted","Data":"fd0054759f6ce9dff50f0f16e38219f0d55088aeede0fbdd7650c5e9b20a631c"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.937719 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-log" containerID="cri-o://537ba3f14b34bfae9ac860071a470308327689fbf78e26c13a8a50303151c52a" gracePeriod=30 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.938092 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-httpd" containerID="cri-o://fd0054759f6ce9dff50f0f16e38219f0d55088aeede0fbdd7650c5e9b20a631c" gracePeriod=30 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.946532 5081 generic.go:334] "Generic (PLEG): container finished" podID="090ece03-2db4-4ab8-9ae0-a18b750bdc8b" containerID="46324de20afe49419bacc89b855b20626c4259348867df225ac35af6b0ff3964" exitCode=0 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.946625 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6ede-account-create-fxvgw" event={"ID":"090ece03-2db4-4ab8-9ae0-a18b750bdc8b","Type":"ContainerDied","Data":"46324de20afe49419bacc89b855b20626c4259348867df225ac35af6b0ff3964"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.954434 5081 generic.go:334] "Generic (PLEG): container finished" podID="ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" containerID="c9113f6ae45f47c36f72929f9ef5d16930dbce5fe4eaf6bc4e6ccb31c7482d8a" exitCode=0 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.954541 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edee-account-create-6jgz9" event={"ID":"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d","Type":"ContainerDied","Data":"c9113f6ae45f47c36f72929f9ef5d16930dbce5fe4eaf6bc4e6ccb31c7482d8a"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.962854 5081 generic.go:334] "Generic (PLEG): container finished" podID="4186afd8-0940-41b6-91b1-522b7ba15413" containerID="6146b16ad5ab0a04856fd52b2dd2b0069ad92afb5a9882ef8774915960a6f925" exitCode=143 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.964167 5081 generic.go:334] "Generic (PLEG): container finished" podID="4186afd8-0940-41b6-91b1-522b7ba15413" containerID="3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0" exitCode=143 Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.964153 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerDied","Data":"6146b16ad5ab0a04856fd52b2dd2b0069ad92afb5a9882ef8774915960a6f925"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.964211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerDied","Data":"3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0"} Oct 03 15:48:50 crc kubenswrapper[5081]: I1003 15:48:50.972899 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.972876832 podStartE2EDuration="5.972876832s" podCreationTimestamp="2025-10-03 15:48:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-10-03 15:48:50.964308154 +0000 UTC m=+1249.929864787" watchObservedRunningTime="2025-10-03 15:48:50.972876832 +0000 UTC m=+1249.938433445" Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.973867 5081 generic.go:334] "Generic (PLEG): container finished" podID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerID="fd0054759f6ce9dff50f0f16e38219f0d55088aeede0fbdd7650c5e9b20a631c" exitCode=0 Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.974192 5081 generic.go:334] "Generic (PLEG): container finished" podID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerID="537ba3f14b34bfae9ac860071a470308327689fbf78e26c13a8a50303151c52a" exitCode=143 Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.973942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerDied","Data":"fd0054759f6ce9dff50f0f16e38219f0d55088aeede0fbdd7650c5e9b20a631c"} Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.974268 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerDied","Data":"537ba3f14b34bfae9ac860071a470308327689fbf78e26c13a8a50303151c52a"} Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.976302 5081 generic.go:334] "Generic (PLEG): container finished" podID="d64f8e16-5199-499b-82ad-e0289139400c" containerID="48a09e9ca64de36546e072756a97d761a030cb3e742c290692500230d25e59fd" exitCode=0 Oct 03 15:48:51 crc kubenswrapper[5081]: I1003 15:48:51.976488 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kj7wf" event={"ID":"d64f8e16-5199-499b-82ad-e0289139400c","Type":"ContainerDied","Data":"48a09e9ca64de36546e072756a97d761a030cb3e742c290692500230d25e59fd"} Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.924730 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.931791 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfj4x\" (UniqueName: \"kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.931844 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.931873 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.931929 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.931991 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.932021 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.932054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.932087 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle\") pod \"4186afd8-0940-41b6-91b1-522b7ba15413\" (UID: \"4186afd8-0940-41b6-91b1-522b7ba15413\") " Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.934412 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.934686 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs" (OuterVolumeSpecName: "logs") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.937717 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts" (OuterVolumeSpecName: "scripts") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.937772 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.938279 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x" (OuterVolumeSpecName: "kube-api-access-xfj4x") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "kube-api-access-xfj4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.971199 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:54 crc kubenswrapper[5081]: I1003 15:48:54.997073 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.005117 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.009295 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.013198 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-4548-account-create-pphwt" event={"ID":"322d9ed5-f62f-4984-b44c-013901c00c9a","Type":"ContainerDied","Data":"b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144"} Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.013243 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b153b5a21a3e63350941f059c22ea081f863630199235e44e2b9cbdfd3dcd144" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.016734 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6ede-account-create-fxvgw" event={"ID":"090ece03-2db4-4ab8-9ae0-a18b750bdc8b","Type":"ContainerDied","Data":"869ede9535f17c1ba5990107f05c318aa5c3f4e8b7a2acdb05211c7de0bb2e67"} Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.016776 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="869ede9535f17c1ba5990107f05c318aa5c3f4e8b7a2acdb05211c7de0bb2e67" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.016860 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6ede-account-create-fxvgw" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.018860 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.020397 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kj7wf" event={"ID":"d64f8e16-5199-499b-82ad-e0289139400c","Type":"ContainerDied","Data":"3e5a306f6be72f56e1d6ddbf3990b7a01698f4d4dd682eeb492a50efa4c9d560"} Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.020505 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e5a306f6be72f56e1d6ddbf3990b7a01698f4d4dd682eeb492a50efa4c9d560" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.020655 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kj7wf" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.022647 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edee-account-create-6jgz9" event={"ID":"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d","Type":"ContainerDied","Data":"4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd"} Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.022742 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f1c9fe903b33e56749820c229a452656339a1993326bd47b207d4d7a88ab8bd" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.022823 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-edee-account-create-6jgz9" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.033538 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034245 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-866gc\" (UniqueName: \"kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc\") pod \"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d\" (UID: \"ea8e8f30-c5b5-4fc5-a692-75b234dfe04d\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034278 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjldp\" (UniqueName: \"kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp\") pod \"090ece03-2db4-4ab8-9ae0-a18b750bdc8b\" (UID: \"090ece03-2db4-4ab8-9ae0-a18b750bdc8b\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034319 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034340 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlghv\" (UniqueName: \"kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv\") pod \"322d9ed5-f62f-4984-b44c-013901c00c9a\" (UID: \"322d9ed5-f62f-4984-b44c-013901c00c9a\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034379 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034401 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhc2b\" (UniqueName: \"kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034433 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034539 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts\") pod \"d64f8e16-5199-499b-82ad-e0289139400c\" (UID: \"d64f8e16-5199-499b-82ad-e0289139400c\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034957 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc 
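
This burst of reconciler_common.go "operationExecutor.UnmountVolume started" lines is the volume manager's desired-state versus actual-state diff: the keystone-bootstrap and account-create pods have been deleted, so every mount still present for their UIDs gets an unmount queued, and each completed unmount is confirmed later by a "Volume detached" record. A minimal sketch of that diff, with invented types:

package main

import "fmt"

type mount struct{ podUID, volume string }

// reconcile queues an unmount for every actual mount that is no longer in the
// desired state, mirroring the "operationExecutor.UnmountVolume started" and
// subsequent "Volume detached" records above.
func reconcile(desired map[mount]bool, actual []mount) {
	for _, m := range actual {
		if !desired[m] {
			fmt.Printf("UnmountVolume started for volume %q pod %q\n", m.volume, m.podUID)
		}
	}
}

func main() {
	actual := []mount{
		{"d64f8e16-5199-499b-82ad-e0289139400c", "combined-ca-bundle"},
		{"d64f8e16-5199-499b-82ad-e0289139400c", "fernet-keys"},
	}
	reconcile(map[mount]bool{}, actual) // pod deleted: nothing is desired
}
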
kubenswrapper[5081]: I1003 15:48:55.034981 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.034993 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4186afd8-0940-41b6-91b1-522b7ba15413-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.035013 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.035025 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfj4x\" (UniqueName: \"kubernetes.io/projected/4186afd8-0940-41b6-91b1-522b7ba15413-kube-api-access-xfj4x\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.035050 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.040836 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4186afd8-0940-41b6-91b1-522b7ba15413","Type":"ContainerDied","Data":"93fd9385db220935f0cab1605fe15582a73e0d6c483914fc9ab73000f6f09381"} Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.040883 5081 scope.go:117] "RemoveContainer" containerID="6146b16ad5ab0a04856fd52b2dd2b0069ad92afb5a9882ef8774915960a6f925" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.041010 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.052598 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b" (OuterVolumeSpecName: "kube-api-access-nhc2b") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "kube-api-access-nhc2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.054224 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts" (OuterVolumeSpecName: "scripts") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.080444 5081 scope.go:117] "RemoveContainer" containerID="3435d9f0e22240edc1a31b3bc63cdecdce2f9106c751ebe0a06f5f62465d56a0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.085210 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc" (OuterVolumeSpecName: "kube-api-access-866gc") pod "ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" (UID: "ea8e8f30-c5b5-4fc5-a692-75b234dfe04d"). InnerVolumeSpecName "kube-api-access-866gc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.091390 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.092528 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv" (OuterVolumeSpecName: "kube-api-access-mlghv") pod "322d9ed5-f62f-4984-b44c-013901c00c9a" (UID: "322d9ed5-f62f-4984-b44c-013901c00c9a"). InnerVolumeSpecName "kube-api-access-mlghv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.093292 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.105935 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp" (OuterVolumeSpecName: "kube-api-access-gjldp") pod "090ece03-2db4-4ab8-9ae0-a18b750bdc8b" (UID: "090ece03-2db4-4ab8-9ae0-a18b750bdc8b"). InnerVolumeSpecName "kube-api-access-gjldp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.121964 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data" (OuterVolumeSpecName: "config-data") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.127670 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.131211 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4186afd8-0940-41b6-91b1-522b7ba15413" (UID: "4186afd8-0940-41b6-91b1-522b7ba15413"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137031 5081 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137065 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137080 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137093 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4186afd8-0940-41b6-91b1-522b7ba15413-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137105 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137117 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-866gc\" (UniqueName: \"kubernetes.io/projected/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d-kube-api-access-866gc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137130 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjldp\" (UniqueName: \"kubernetes.io/projected/090ece03-2db4-4ab8-9ae0-a18b750bdc8b-kube-api-access-gjldp\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137141 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137152 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlghv\" (UniqueName: \"kubernetes.io/projected/322d9ed5-f62f-4984-b44c-013901c00c9a-kube-api-access-mlghv\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.137164 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhc2b\" (UniqueName: \"kubernetes.io/projected/d64f8e16-5199-499b-82ad-e0289139400c-kube-api-access-nhc2b\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.138806 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data" (OuterVolumeSpecName: "config-data") pod "d64f8e16-5199-499b-82ad-e0289139400c" (UID: "d64f8e16-5199-499b-82ad-e0289139400c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.140016 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.166904 5081 util.go:48] "No ready sandbox for pod can be found. 
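
Worth noting in the records above is the two-level teardown for the local PV: the per-pod TearDown of "local-storage07-crc" completes first, and only when no pod-level mount references the volume does the node-level "UnmountDevice started" / "UnmountDevice succeeded" pair run, ahead of the final "Volume detached" report. A reference-counting sketch of that ordering (the real operation executor runs these steps asynchronously):

package main

import "fmt"

// device tracks how many pod-level mounts still reference a globally mounted
// volume; UnmountDevice may only run once the count reaches zero.
type device struct {
	name string
	refs int
}

func (d *device) tearDownPodMount() {
	d.refs--
	fmt.Printf("UnmountVolume.TearDown succeeded for %s (refs left: %d)\n", d.name, d.refs)
	if d.refs == 0 {
		fmt.Printf("UnmountDevice succeeded for volume %q\n", d.name)
	}
}

func main() {
	d := &device{name: "local-storage07-crc", refs: 1} // one glance pod used it
	d.tearDownPodMount()
}
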
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.240107 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64f8e16-5199-499b-82ad-e0289139400c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.240148 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.343407 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.343925 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.343983 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.344060 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbjtr\" (UniqueName: \"kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.344107 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.344154 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.344177 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.344204 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle\") pod \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\" (UID: \"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8\") " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.346621 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run" 
(OuterVolumeSpecName: "httpd-run") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.346869 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs" (OuterVolumeSpecName: "logs") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.352895 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.357137 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts" (OuterVolumeSpecName: "scripts") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.362397 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr" (OuterVolumeSpecName: "kube-api-access-kbjtr") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "kube-api-access-kbjtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.373467 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.400750 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.407478 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.414724 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.431836 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data" (OuterVolumeSpecName: "config-data") pod "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" (UID: "b0e8ecb4-ecff-488a-8bd3-00257d8a41c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446292 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446334 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446352 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446363 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbjtr\" (UniqueName: \"kubernetes.io/projected/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-kube-api-access-kbjtr\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446375 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446385 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446395 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.446406 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450377 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450710 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450723 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450740 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322d9ed5-f62f-4984-b44c-013901c00c9a" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450748 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="322d9ed5-f62f-4984-b44c-013901c00c9a" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450770 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64f8e16-5199-499b-82ad-e0289139400c" containerName="keystone-bootstrap" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450776 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64f8e16-5199-499b-82ad-e0289139400c" containerName="keystone-bootstrap" Oct 03 15:48:55 crc kubenswrapper[5081]: 
E1003 15:48:55.450794 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="090ece03-2db4-4ab8-9ae0-a18b750bdc8b" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450800 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="090ece03-2db4-4ab8-9ae0-a18b750bdc8b" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450810 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450816 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450827 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450833 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450849 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450855 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: E1003 15:48:55.450862 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.450868 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451019 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="322d9ed5-f62f-4984-b44c-013901c00c9a" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451031 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451040 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451050 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" containerName="glance-httpd" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451062 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" containerName="glance-log" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451072 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451084 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d64f8e16-5199-499b-82ad-e0289139400c" containerName="keystone-bootstrap" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451097 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="090ece03-2db4-4ab8-9ae0-a18b750bdc8b" 
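
Despite the E prefix, these cpu_manager and memory_manager "RemoveStaleState" lines are routine cleanup rather than failures: while admitting the replacement glance pod, both managers walk their checkpointed per-container assignments and drop every entry whose pod UID is no longer active. A sketch of that pruning over a map keyed by pod UID and container name (the types are invented for illustration):

package main

import "fmt"

// pruneStale deletes checkpointed assignments for containers whose pod is no
// longer active, as the RemoveStaleState records above do for the deleted
// glance, keystone-bootstrap, and mariadb-account-create pods.
func pruneStale(assignments map[string]map[string]string, active map[string]bool) {
	for podUID, containers := range assignments {
		if active[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("removing container podUID=%q containerName=%q\n", podUID, name)
		}
		delete(assignments, podUID)
	}
}

func main() {
	assignments := map[string]map[string]string{
		"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8": {"glance-log": "0-3", "glance-httpd": "0-3"},
	}
	pruneStale(assignments, map[string]bool{}) // old pod UID no longer active
}
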
containerName="mariadb-account-create" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.451942 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.457171 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.457496 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.461399 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.465604 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.548144 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649333 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649387 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkfzk\" (UniqueName: \"kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649478 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649511 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649529 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649586 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle\") pod \"glance-default-internal-api-0\" 
(UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649632 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.649648 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751163 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751204 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751240 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751269 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkfzk\" (UniqueName: \"kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751322 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751345 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751360 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751390 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.751923 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.753876 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.753899 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.760068 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.764644 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.768683 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.771484 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.774417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkfzk\" (UniqueName: \"kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 
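
Mounting for the new glance-default-internal-api-0 pod is the teardown sequence in reverse, and for the local PV it is two-phase: MountVolume.MountDevice first ensures the device-level mount at the volume's global path (/mnt/openstack/pv07 above), then MountVolume.SetUp exposes it inside the pod's volumes directory, which for local volumes amounts to a bind mount. A simplified model of the two phases; this interface is a sketch, not the real volume plugin API:

package main

import "fmt"

// mounter models the two kubelet mount phases seen above: a once-per-node
// device mount, then a per-pod SetUp into /var/lib/kubelet/pods/<uid>/volumes.
type mounter interface {
	MountDevice(deviceMountPath string) error
	SetUp(podDir string) error
}

type localVolume struct{ name string }

func (v localVolume) MountDevice(p string) error {
	fmt.Printf("MountVolume.MountDevice succeeded for %s at %q\n", v.name, p)
	return nil
}

func (v localVolume) SetUp(podDir string) error {
	fmt.Printf("MountVolume.SetUp succeeded for %s into %q\n", v.name, podDir)
	return nil
}

func main() {
	var m mounter = localVolume{name: "local-storage07-crc"}
	_ = m.MountDevice("/mnt/openstack/pv07")
	_ = m.SetUp("/var/lib/kubelet/pods/210b770a-8b67-424a-acea-05c551a48f36/volumes")
}
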
15:48:55.780950 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:48:55 crc kubenswrapper[5081]: I1003 15:48:55.849893 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4186afd8-0940-41b6-91b1-522b7ba15413" path="/var/lib/kubelet/pods/4186afd8-0940-41b6-91b1-522b7ba15413/volumes" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.054342 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9759p" event={"ID":"cdc78530-6030-49c0-8e3e-ae1b26430a90","Type":"ContainerStarted","Data":"fa10789b894f70b4d3616f3383ce75ef1f96a89dfb2da225a13e895d6558e0e9"} Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.062969 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0e8ecb4-ecff-488a-8bd3-00257d8a41c8","Type":"ContainerDied","Data":"b55e937b95dcb2e054753c74b155f8fc1f5520d58f99b751e2c5dd7df986a0bd"} Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.063026 5081 scope.go:117] "RemoveContainer" containerID="fd0054759f6ce9dff50f0f16e38219f0d55088aeede0fbdd7650c5e9b20a631c" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.063058 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.067038 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerStarted","Data":"5a5653bb42b303da200835207bfbab66727da8afe29a91910c498bea52c7d5c6"} Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.070484 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-4548-account-create-pphwt" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.080147 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.093650 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-9759p" podStartSLOduration=2.707694559 podStartE2EDuration="10.093633898s" podCreationTimestamp="2025-10-03 15:48:46 +0000 UTC" firstStartedPulling="2025-10-03 15:48:47.401081125 +0000 UTC m=+1246.366637738" lastFinishedPulling="2025-10-03 15:48:54.787020464 +0000 UTC m=+1253.752577077" observedRunningTime="2025-10-03 15:48:56.077943594 +0000 UTC m=+1255.043500207" watchObservedRunningTime="2025-10-03 15:48:56.093633898 +0000 UTC m=+1255.059190511" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.099868 5081 scope.go:117] "RemoveContainer" containerID="537ba3f14b34bfae9ac860071a470308327689fbf78e26c13a8a50303151c52a" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.131131 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.149695 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.149755 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kj7wf"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.160741 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kj7wf"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.173658 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.175213 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.180320 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.181089 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.181349 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.212346 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dfcmf"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.213715 5081 util.go:30] "No sandbox for pod can be found. 
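
The pod_startup_latency_tracker record for placement-db-sync-9759p decomposes exactly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling), i.e. startup latency excluding pull time. The arithmetic can be reproduced directly from the timestamps in the record, which use Go's default time formatting:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-10-03 15:48:46 +0000 UTC")
	firstPull := mustParse("2025-10-03 15:48:47.401081125 +0000 UTC")
	lastPull := mustParse("2025-10-03 15:48:54.787020464 +0000 UTC")
	watched := mustParse("2025-10-03 15:48:56.093633898 +0000 UTC")

	e2e := watched.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: E2E minus pull time
	fmt.Println(e2e, slo)
}

This prints 10.093633898s and 2.707694559s, matching the record's podStartE2EDuration and podStartSLOduration.
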
Need to start a new one" pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.215536 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.216013 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mjd9n" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.216180 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.216325 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.233394 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dfcmf"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367881 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367911 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59b9d\" (UniqueName: \"kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367947 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367975 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.367992 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368011 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
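
Before starting keystone-bootstrap-dfcmf, the kubelet populates a watch cache for each Secret the pod references (the "Caches populated for *v1.Secret ..." lines), so volume SetUp reads secret data from a local cache instead of hitting the API server on every mount. The kubelet registers per-object reflectors internally; the client-go sketch below shows the same populate-then-read-from-cache pattern from outside the node, and is an analogue rather than kubelet's own code path (it assumes a reachable cluster and a default kubeconfig):

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	clientset, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	// Watch Secrets in the "openstack" namespace and block until the initial
	// list has been cached, the moment the log reports as "Caches populated".
	factory := informers.NewSharedInformerFactoryWithOptions(
		clientset, 10*time.Minute, informers.WithNamespace("openstack"))
	informer := factory.Core().V1().Secrets().Informer()
	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	if !cache.WaitForCacheSync(stop, informer.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("caches populated for *v1.Secret in namespace openstack")
}
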
\"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368031 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368053 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368073 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrlvz\" (UniqueName: \"kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368100 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368125 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368162 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.368187 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473145 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473199 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473239 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473289 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473325 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473353 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59b9d\" (UniqueName: \"kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473388 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473431 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473452 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473469 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: 
\"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473493 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473510 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrlvz\" (UniqueName: \"kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.473537 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.474281 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.475539 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.476903 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.480262 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.480347 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.480570 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 
15:48:56.486115 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.487380 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.487461 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.487970 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.488196 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.489749 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.492477 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59b9d\" (UniqueName: \"kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d\") pod \"keystone-bootstrap-dfcmf\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") " pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.493324 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrlvz\" (UniqueName: \"kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.516260 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.539012 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.548241 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dfcmf" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.648812 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.728807 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.729065 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="dnsmasq-dns" containerID="cri-o://a756938884cf8ee1b7cd495eb1c954b4895a28a83f195dc625f5ed706a090fd4" gracePeriod=10 Oct 03 15:48:56 crc kubenswrapper[5081]: I1003 15:48:56.818940 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.086771 5081 generic.go:334] "Generic (PLEG): container finished" podID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerID="a756938884cf8ee1b7cd495eb1c954b4895a28a83f195dc625f5ed706a090fd4" exitCode=0 Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.086847 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" event={"ID":"aaf72953-f8f2-4223-b8f8-07d58cb5418e","Type":"ContainerDied","Data":"a756938884cf8ee1b7cd495eb1c954b4895a28a83f195dc625f5ed706a090fd4"} Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.130768 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dfcmf"] Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.191951 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:48:57 crc kubenswrapper[5081]: W1003 15:48:57.285748 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcecfad1a_8dce_4c88_8435_2ef29c6d7fb9.slice/crio-e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360 WatchSource:0}: Error finding container e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360: Status 404 returned error can't find the container with id e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360 Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.782247 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.837816 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config\") pod \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.837884 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb\") pod \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.837979 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc\") pod \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.838087 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnc2v\" (UniqueName: \"kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v\") pod \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.838193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb\") pod \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\" (UID: \"aaf72953-f8f2-4223-b8f8-07d58cb5418e\") " Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.839581 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0e8ecb4-ecff-488a-8bd3-00257d8a41c8" path="/var/lib/kubelet/pods/b0e8ecb4-ecff-488a-8bd3-00257d8a41c8/volumes" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.840440 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d64f8e16-5199-499b-82ad-e0289139400c" path="/var/lib/kubelet/pods/d64f8e16-5199-499b-82ad-e0289139400c/volumes" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.856874 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v" (OuterVolumeSpecName: "kube-api-access-xnc2v") pod "aaf72953-f8f2-4223-b8f8-07d58cb5418e" (UID: "aaf72953-f8f2-4223-b8f8-07d58cb5418e"). InnerVolumeSpecName "kube-api-access-xnc2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.899950 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aaf72953-f8f2-4223-b8f8-07d58cb5418e" (UID: "aaf72953-f8f2-4223-b8f8-07d58cb5418e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.900108 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aaf72953-f8f2-4223-b8f8-07d58cb5418e" (UID: "aaf72953-f8f2-4223-b8f8-07d58cb5418e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.901502 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config" (OuterVolumeSpecName: "config") pod "aaf72953-f8f2-4223-b8f8-07d58cb5418e" (UID: "aaf72953-f8f2-4223-b8f8-07d58cb5418e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.933124 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "aaf72953-f8f2-4223-b8f8-07d58cb5418e" (UID: "aaf72953-f8f2-4223-b8f8-07d58cb5418e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.940161 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.940192 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.940204 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.940213 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aaf72953-f8f2-4223-b8f8-07d58cb5418e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:57 crc kubenswrapper[5081]: I1003 15:48:57.940224 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnc2v\" (UniqueName: \"kubernetes.io/projected/aaf72953-f8f2-4223-b8f8-07d58cb5418e-kube-api-access-xnc2v\") on node \"crc\" DevicePath \"\"" Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.101832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" event={"ID":"aaf72953-f8f2-4223-b8f8-07d58cb5418e","Type":"ContainerDied","Data":"4b576980a85c825b12e191acced7ea9a63a8ba1ea0cad70fae83d168fd528a32"} Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.101871 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59775c759f-xvhlk" Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.101878 5081 scope.go:117] "RemoveContainer" containerID="a756938884cf8ee1b7cd495eb1c954b4895a28a83f195dc625f5ed706a090fd4" Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.103794 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcmf" event={"ID":"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9","Type":"ContainerStarted","Data":"e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360"} Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.105622 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerStarted","Data":"ee39f5e563af73fa9df44b5893cc4dec6b00645f4dc50c703005ef8b4509b903"} Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.107005 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerStarted","Data":"3c47a77e1071300704cc775cc253f89fe84e49e4abfeef3132d5aa6d3ce22b5c"} Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.126870 5081 scope.go:117] "RemoveContainer" containerID="0fc2f7cb9c7b4ba27b7ba099eb1843dfaf72c7130cc376ba0fc7aba42bf613c1" Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.144080 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:48:58 crc kubenswrapper[5081]: I1003 15:48:58.148884 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59775c759f-xvhlk"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.100704 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-z5l85"] Oct 03 15:48:59 crc kubenswrapper[5081]: E1003 15:48:59.101356 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="init" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.101372 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="init" Oct 03 15:48:59 crc kubenswrapper[5081]: E1003 15:48:59.101388 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="dnsmasq-dns" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.101396 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="dnsmasq-dns" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.101629 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" containerName="dnsmasq-dns" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.102165 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.105437 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-l497s" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.106976 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.114761 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-z5l85"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.137970 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerStarted","Data":"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572"} Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.144365 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcmf" event={"ID":"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9","Type":"ContainerStarted","Data":"7bb2c365bb5e50083ae986c7ff2d6a13940d0776ecb674a83c5e43e51215a427"} Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.147309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerStarted","Data":"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45"} Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.162774 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47lzt\" (UniqueName: \"kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.162882 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.162918 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.202031 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dfcmf" podStartSLOduration=3.20201057 podStartE2EDuration="3.20201057s" podCreationTimestamp="2025-10-03 15:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:48:59.180774355 +0000 UTC m=+1258.146330968" watchObservedRunningTime="2025-10-03 15:48:59.20201057 +0000 UTC m=+1258.167567173" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.244625 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-pnxvc"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.245814 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.250938 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.251217 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.251379 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-psb4d" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.259963 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pnxvc"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264188 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264277 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47lzt\" (UniqueName: \"kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264318 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264344 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264364 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb5m9\" (UniqueName: \"kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264387 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264424 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264448 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.264469 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.280508 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.292053 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47lzt\" (UniqueName: \"kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.303013 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle\") pod \"barbican-db-sync-z5l85\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") " pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.338332 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-mlcdv"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.339340 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.350016 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.350188 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.350038 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-n4qpv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.353723 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mlcdv"] Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.372838 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.372910 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.372954 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.372982 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.373027 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.373116 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfmlf\" (UniqueName: \"kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.373184 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.373206 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb5m9\" 
(UniqueName: \"kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.373280 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.378124 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.380510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.384261 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.384723 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.387304 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.400913 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb5m9\" (UniqueName: \"kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9\") pod \"cinder-db-sync-pnxvc\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.476160 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.476853 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfmlf\" (UniqueName: \"kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" 
Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.477004 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.481323 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.481381 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.481469 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-z5l85" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.497305 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfmlf\" (UniqueName: \"kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf\") pod \"neutron-db-sync-mlcdv\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.569904 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.678981 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:48:59 crc kubenswrapper[5081]: I1003 15:48:59.848699 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaf72953-f8f2-4223-b8f8-07d58cb5418e" path="/var/lib/kubelet/pods/aaf72953-f8f2-4223-b8f8-07d58cb5418e/volumes" Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.017044 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-z5l85"] Oct 03 15:49:00 crc kubenswrapper[5081]: W1003 15:49:00.033515 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod556f311d_1398_48a5_bf38_7dc07f3bdfd0.slice/crio-30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778 WatchSource:0}: Error finding container 30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778: Status 404 returned error can't find the container with id 30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778 Oct 03 15:49:00 crc kubenswrapper[5081]: W1003 15:49:00.119547 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod680afad9_2798_42bd_a115_81c10c66662a.slice/crio-259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16 WatchSource:0}: Error finding container 259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16: Status 404 returned error can't find the container with id 259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16 Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.125950 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pnxvc"] Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.157436 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-z5l85" event={"ID":"556f311d-1398-48a5-bf38-7dc07f3bdfd0","Type":"ContainerStarted","Data":"30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778"} Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.160991 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerStarted","Data":"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492"} Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.168036 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pnxvc" event={"ID":"680afad9-2798-42bd-a115-81c10c66662a","Type":"ContainerStarted","Data":"259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16"} Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.170443 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerStarted","Data":"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217"} Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.174936 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerStarted","Data":"4bc7fd788ed88f62710523d7d0f1940f62dcfc74674d112b0356dec51458333b"} Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.196629 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.196610631 podStartE2EDuration="5.196610631s" podCreationTimestamp="2025-10-03 15:48:55 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:00.180770082 +0000 UTC m=+1259.146326715" watchObservedRunningTime="2025-10-03 15:49:00.196610631 +0000 UTC m=+1259.162167244" Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.221135 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.22111293 podStartE2EDuration="4.22111293s" podCreationTimestamp="2025-10-03 15:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:00.215534988 +0000 UTC m=+1259.181091601" watchObservedRunningTime="2025-10-03 15:49:00.22111293 +0000 UTC m=+1259.186669543" Oct 03 15:49:00 crc kubenswrapper[5081]: I1003 15:49:00.243907 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mlcdv"] Oct 03 15:49:00 crc kubenswrapper[5081]: W1003 15:49:00.248960 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7456396f_1779_43c3_9a7c_888c42e64a52.slice/crio-06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962 WatchSource:0}: Error finding container 06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962: Status 404 returned error can't find the container with id 06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962 Oct 03 15:49:01 crc kubenswrapper[5081]: I1003 15:49:01.185705 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mlcdv" event={"ID":"7456396f-1779-43c3-9a7c-888c42e64a52","Type":"ContainerStarted","Data":"139b61155eb21f18f967d59dc57cb8879b6e1eb054f36621551ecc1cecdc4357"} Oct 03 15:49:01 crc kubenswrapper[5081]: I1003 15:49:01.186043 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mlcdv" event={"ID":"7456396f-1779-43c3-9a7c-888c42e64a52","Type":"ContainerStarted","Data":"06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962"} Oct 03 15:49:01 crc kubenswrapper[5081]: I1003 15:49:01.189812 5081 generic.go:334] "Generic (PLEG): container finished" podID="cdc78530-6030-49c0-8e3e-ae1b26430a90" containerID="fa10789b894f70b4d3616f3383ce75ef1f96a89dfb2da225a13e895d6558e0e9" exitCode=0 Oct 03 15:49:01 crc kubenswrapper[5081]: I1003 15:49:01.190369 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9759p" event={"ID":"cdc78530-6030-49c0-8e3e-ae1b26430a90","Type":"ContainerDied","Data":"fa10789b894f70b4d3616f3383ce75ef1f96a89dfb2da225a13e895d6558e0e9"} Oct 03 15:49:01 crc kubenswrapper[5081]: I1003 15:49:01.216519 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-mlcdv" podStartSLOduration=2.216501805 podStartE2EDuration="2.216501805s" podCreationTimestamp="2025-10-03 15:48:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:01.204314522 +0000 UTC m=+1260.169871155" watchObservedRunningTime="2025-10-03 15:49:01.216501805 +0000 UTC m=+1260.182058418" Oct 03 15:49:02 crc kubenswrapper[5081]: I1003 15:49:02.206454 5081 generic.go:334] "Generic (PLEG): container finished" podID="cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" containerID="7bb2c365bb5e50083ae986c7ff2d6a13940d0776ecb674a83c5e43e51215a427" exitCode=0 Oct 03 15:49:02 
crc kubenswrapper[5081]: I1003 15:49:02.206672 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcmf" event={"ID":"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9","Type":"ContainerDied","Data":"7bb2c365bb5e50083ae986c7ff2d6a13940d0776ecb674a83c5e43e51215a427"} Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.204196 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-9759p" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.222255 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9759p" event={"ID":"cdc78530-6030-49c0-8e3e-ae1b26430a90","Type":"ContainerDied","Data":"18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391"} Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.222301 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18d0cfae8e2e0fef32d2e5fe717fd2502335327c20991b2040fb541b76c6b391" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.222591 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-9759p" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.253918 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs\") pod \"cdc78530-6030-49c0-8e3e-ae1b26430a90\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.254127 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data\") pod \"cdc78530-6030-49c0-8e3e-ae1b26430a90\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.254154 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khrvc\" (UniqueName: \"kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc\") pod \"cdc78530-6030-49c0-8e3e-ae1b26430a90\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.254219 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle\") pod \"cdc78530-6030-49c0-8e3e-ae1b26430a90\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.254310 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts\") pod \"cdc78530-6030-49c0-8e3e-ae1b26430a90\" (UID: \"cdc78530-6030-49c0-8e3e-ae1b26430a90\") " Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.254654 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs" (OuterVolumeSpecName: "logs") pod "cdc78530-6030-49c0-8e3e-ae1b26430a90" (UID: "cdc78530-6030-49c0-8e3e-ae1b26430a90"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.259966 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts" (OuterVolumeSpecName: "scripts") pod "cdc78530-6030-49c0-8e3e-ae1b26430a90" (UID: "cdc78530-6030-49c0-8e3e-ae1b26430a90"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.276176 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc" (OuterVolumeSpecName: "kube-api-access-khrvc") pod "cdc78530-6030-49c0-8e3e-ae1b26430a90" (UID: "cdc78530-6030-49c0-8e3e-ae1b26430a90"). InnerVolumeSpecName "kube-api-access-khrvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.283102 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdc78530-6030-49c0-8e3e-ae1b26430a90" (UID: "cdc78530-6030-49c0-8e3e-ae1b26430a90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.302798 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-58db4df78-q9st4"] Oct 03 15:49:03 crc kubenswrapper[5081]: E1003 15:49:03.303186 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc78530-6030-49c0-8e3e-ae1b26430a90" containerName="placement-db-sync" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.303200 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc78530-6030-49c0-8e3e-ae1b26430a90" containerName="placement-db-sync" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.304811 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc78530-6030-49c0-8e3e-ae1b26430a90" containerName="placement-db-sync" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.306114 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.312069 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.312343 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.318705 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data" (OuterVolumeSpecName: "config-data") pod "cdc78530-6030-49c0-8e3e-ae1b26430a90" (UID: "cdc78530-6030-49c0-8e3e-ae1b26430a90"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.344618 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-58db4df78-q9st4"] Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.359475 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.359515 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.359530 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdc78530-6030-49c0-8e3e-ae1b26430a90-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.359541 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdc78530-6030-49c0-8e3e-ae1b26430a90-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.359553 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khrvc\" (UniqueName: \"kubernetes.io/projected/cdc78530-6030-49c0-8e3e-ae1b26430a90-kube-api-access-khrvc\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462376 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5mr8\" (UniqueName: \"kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462443 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462498 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462552 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462599 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc 
kubenswrapper[5081]: I1003 15:49:03.462655 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.462694 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566008 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566085 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566104 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566146 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566216 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566274 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5mr8\" (UniqueName: \"kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.566306 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.567227 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.569784 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.570034 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.573231 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.574934 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.575919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.587267 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5mr8\" (UniqueName: \"kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8\") pod \"placement-58db4df78-q9st4\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:03 crc kubenswrapper[5081]: I1003 15:49:03.727696 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.535170 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dfcmf"
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.609308 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.609816 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.610894 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59b9d\" (UniqueName: \"kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.611098 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.611213 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.611370 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data\") pod \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\" (UID: \"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9\") "
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.623034 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.623065 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts" (OuterVolumeSpecName: "scripts") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.623641 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.637353 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d" (OuterVolumeSpecName: "kube-api-access-59b9d") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "kube-api-access-59b9d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.645320 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.645707 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data" (OuterVolumeSpecName: "config-data") pod "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" (UID: "cecfad1a-8dce-4c88-8435-2ef29c6d7fb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714107 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714156 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714172 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714189 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59b9d\" (UniqueName: \"kubernetes.io/projected/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-kube-api-access-59b9d\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714201 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:05 crc kubenswrapper[5081]: I1003 15:49:05.714212 5081 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.080937 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.080985 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.114022 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.151642 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.255262 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dfcmf"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.255305 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcmf" event={"ID":"cecfad1a-8dce-4c88-8435-2ef29c6d7fb9","Type":"ContainerDied","Data":"e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360"}
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.255331 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8380192350377404a32d6f04adecd83762ce3d229e39dea1ce4a62cfed3f360"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.255703 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.255769 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.540719 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.540762 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.586795 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.591297 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.628073 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:49:06 crc kubenswrapper[5081]: E1003 15:49:06.628608 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" containerName="keystone-bootstrap"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.628630 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" containerName="keystone-bootstrap"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.628859 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" containerName="keystone-bootstrap"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.629653 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.634610 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.635391 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mjd9n"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.635432 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.635638 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.635809 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.635958 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.664958 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736090 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736144 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736183 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736394 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljcvj\" (UniqueName: \"kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736535 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736593 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.736661 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.737440 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.838737 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.838809 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.838838 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.838869 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.839093 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljcvj\" (UniqueName: \"kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.839137 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.839166 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.839201 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.843521 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.843572 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.843745 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.844579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.844625 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.846122 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.855535 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.867317 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljcvj\" (UniqueName: \"kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj\") pod \"keystone-7f5c54b599-s8jwr\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:06 crc kubenswrapper[5081]: I1003 15:49:06.950757 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:07 crc kubenswrapper[5081]: I1003 15:49:07.263867 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:07 crc kubenswrapper[5081]: I1003 15:49:07.263904 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:08 crc kubenswrapper[5081]: I1003 15:49:08.521314 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:08 crc kubenswrapper[5081]: I1003 15:49:08.521749 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:49:08 crc kubenswrapper[5081]: I1003 15:49:08.523702 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 15:49:09 crc kubenswrapper[5081]: I1003 15:49:09.342675 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:09 crc kubenswrapper[5081]: I1003 15:49:09.343339 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 15:49:09 crc kubenswrapper[5081]: I1003 15:49:09.636023 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 15:49:17 crc kubenswrapper[5081]: E1003 15:49:17.478311 5081 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:f4b02f57187855a6adb5b32d9a8ed92dea2376471c6e33783b4c45f4b56b0166"
Oct 03 15:49:17 crc kubenswrapper[5081]: E1003 15:49:17.479066 5081 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:f4b02f57187855a6adb5b32d9a8ed92dea2376471c6e33783b4c45f4b56b0166,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wb5m9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-pnxvc_openstack(680afad9-2798-42bd-a115-81c10c66662a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 15:49:17 crc kubenswrapper[5081]: E1003 15:49:17.480262 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-pnxvc" podUID="680afad9-2798-42bd-a115-81c10c66662a"
Oct 03 15:49:17 crc kubenswrapper[5081]: I1003 15:49:17.852188 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:49:17 crc kubenswrapper[5081]: I1003 15:49:17.987776 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-58db4df78-q9st4"]
Oct 03 15:49:18 crc kubenswrapper[5081]: W1003 15:49:18.005488 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ad4758_5e5f_4ba3_84be_a4ae754e9048.slice/crio-f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53 WatchSource:0}: Error finding container f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53: Status 404 returned error can't find the container with id f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.366711 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerStarted","Data":"683a9cea3704f28dee554ad7d0a5fd46617595e708f08884aeeafa501eeeb131"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.367154 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.367191 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerStarted","Data":"5667e13010e026cffae93f1f48fe2279663fa544702ab46b085a5c829713ef57"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.367210 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerStarted","Data":"f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.367226 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.368520 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-z5l85" event={"ID":"556f311d-1398-48a5-bf38-7dc07f3bdfd0","Type":"ContainerStarted","Data":"67f277b31611a80868ec8ef47b063a503f8912117172f638b0d316cb15085922"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.370894 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f5c54b599-s8jwr" event={"ID":"254b0c39-e3af-4a48-a954-5ff334d36670","Type":"ContainerStarted","Data":"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.370929 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f5c54b599-s8jwr" event={"ID":"254b0c39-e3af-4a48-a954-5ff334d36670","Type":"ContainerStarted","Data":"c65a84dea2d07c0406ddc3330207acc536f4b55adbfdb1e29cee19462a1e83ff"}
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.370970 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.373578 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerStarted","Data":"26d92e5326c776c8b8b24981a7522bcd3c972ca4f4887bb5abb550c2c3acb072"}
Oct 03 15:49:18 crc kubenswrapper[5081]: E1003 15:49:18.374987 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:f4b02f57187855a6adb5b32d9a8ed92dea2376471c6e33783b4c45f4b56b0166\\\"\"" pod="openstack/cinder-db-sync-pnxvc" podUID="680afad9-2798-42bd-a115-81c10c66662a"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.393379 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-58db4df78-q9st4" podStartSLOduration=15.393358051 podStartE2EDuration="15.393358051s" podCreationTimestamp="2025-10-03 15:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:18.390461687 +0000 UTC m=+1277.356018320" watchObservedRunningTime="2025-10-03 15:49:18.393358051 +0000 UTC m=+1277.358914684"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.439421 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-z5l85" podStartSLOduration=2.049094798 podStartE2EDuration="19.439399594s" podCreationTimestamp="2025-10-03 15:48:59 +0000 UTC" firstStartedPulling="2025-10-03 15:49:00.038356299 +0000 UTC m=+1259.003912912" lastFinishedPulling="2025-10-03 15:49:17.428661095 +0000 UTC m=+1276.394217708" observedRunningTime="2025-10-03 15:49:18.405782321 +0000 UTC m=+1277.371338934" watchObservedRunningTime="2025-10-03 15:49:18.439399594 +0000 UTC m=+1277.404956207"
Oct 03 15:49:18 crc kubenswrapper[5081]: I1003 15:49:18.450096 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7f5c54b599-s8jwr" podStartSLOduration=12.450078333 podStartE2EDuration="12.450078333s" podCreationTimestamp="2025-10-03 15:49:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:18.429778535 +0000 UTC m=+1277.395335148" watchObservedRunningTime="2025-10-03 15:49:18.450078333 +0000 UTC m=+1277.415634946"
Oct 03 15:49:23 crc kubenswrapper[5081]: I1003 15:49:23.420499 5081 generic.go:334] "Generic (PLEG): container finished" podID="556f311d-1398-48a5-bf38-7dc07f3bdfd0" containerID="67f277b31611a80868ec8ef47b063a503f8912117172f638b0d316cb15085922" exitCode=0
Oct 03 15:49:23 crc kubenswrapper[5081]: I1003 15:49:23.420601 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-z5l85" event={"ID":"556f311d-1398-48a5-bf38-7dc07f3bdfd0","Type":"ContainerDied","Data":"67f277b31611a80868ec8ef47b063a503f8912117172f638b0d316cb15085922"}
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.828311 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-z5l85"
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.906357 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle\") pod \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") "
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.906447 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data\") pod \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") "
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.906541 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47lzt\" (UniqueName: \"kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt\") pod \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\" (UID: \"556f311d-1398-48a5-bf38-7dc07f3bdfd0\") "
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.911112 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "556f311d-1398-48a5-bf38-7dc07f3bdfd0" (UID: "556f311d-1398-48a5-bf38-7dc07f3bdfd0"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.912077 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt" (OuterVolumeSpecName: "kube-api-access-47lzt") pod "556f311d-1398-48a5-bf38-7dc07f3bdfd0" (UID: "556f311d-1398-48a5-bf38-7dc07f3bdfd0"). InnerVolumeSpecName "kube-api-access-47lzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:49:24 crc kubenswrapper[5081]: I1003 15:49:24.931844 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "556f311d-1398-48a5-bf38-7dc07f3bdfd0" (UID: "556f311d-1398-48a5-bf38-7dc07f3bdfd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.008702 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.008742 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/556f311d-1398-48a5-bf38-7dc07f3bdfd0-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.008753 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47lzt\" (UniqueName: \"kubernetes.io/projected/556f311d-1398-48a5-bf38-7dc07f3bdfd0-kube-api-access-47lzt\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.477959 5081 generic.go:334] "Generic (PLEG): container finished" podID="7456396f-1779-43c3-9a7c-888c42e64a52" containerID="139b61155eb21f18f967d59dc57cb8879b6e1eb054f36621551ecc1cecdc4357" exitCode=0
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.478094 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mlcdv" event={"ID":"7456396f-1779-43c3-9a7c-888c42e64a52","Type":"ContainerDied","Data":"139b61155eb21f18f967d59dc57cb8879b6e1eb054f36621551ecc1cecdc4357"}
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484143 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerStarted","Data":"a97318e6854d3447afbe240b9fac493d55de58a6a17c4282a1cae5fecfab39b5"}
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484402 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-central-agent" containerID="cri-o://5a5653bb42b303da200835207bfbab66727da8afe29a91910c498bea52c7d5c6" gracePeriod=30
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484443 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484415 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="sg-core" containerID="cri-o://26d92e5326c776c8b8b24981a7522bcd3c972ca4f4887bb5abb550c2c3acb072" gracePeriod=30
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484410 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="proxy-httpd" containerID="cri-o://a97318e6854d3447afbe240b9fac493d55de58a6a17c4282a1cae5fecfab39b5" gracePeriod=30
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.484444 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-notification-agent" containerID="cri-o://4bc7fd788ed88f62710523d7d0f1940f62dcfc74674d112b0356dec51458333b" gracePeriod=30
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.493053 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-z5l85" event={"ID":"556f311d-1398-48a5-bf38-7dc07f3bdfd0","Type":"ContainerDied","Data":"30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778"}
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.493133 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30e9d266b642c5049cbb75d9b6df3dd06b7be11320abd08f42da2549b9b0f778"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.493234 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-z5l85"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.529528 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.111372146 podStartE2EDuration="39.529496888s" podCreationTimestamp="2025-10-03 15:48:46 +0000 UTC" firstStartedPulling="2025-10-03 15:48:47.332770538 +0000 UTC m=+1246.298327151" lastFinishedPulling="2025-10-03 15:49:24.75089528 +0000 UTC m=+1283.716451893" observedRunningTime="2025-10-03 15:49:25.523140364 +0000 UTC m=+1284.488697017" watchObservedRunningTime="2025-10-03 15:49:25.529496888 +0000 UTC m=+1284.495053521"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.710388 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"]
Oct 03 15:49:25 crc kubenswrapper[5081]: E1003 15:49:25.710923 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556f311d-1398-48a5-bf38-7dc07f3bdfd0" containerName="barbican-db-sync"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.710946 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="556f311d-1398-48a5-bf38-7dc07f3bdfd0" containerName="barbican-db-sync"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.711195 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="556f311d-1398-48a5-bf38-7dc07f3bdfd0" containerName="barbican-db-sync"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.712456 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.718142 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.718412 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.718958 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-l497s"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.728413 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.808652 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.810498 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.813005 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.822607 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.822661 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnz78\" (UniqueName: \"kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.822687 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.822711 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.822749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.871884 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.877995 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.886712 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.914489 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929524 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgfnx\" (UniqueName: \"kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929715 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929759 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929779 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929801 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929827 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnz78\" (UniqueName: \"kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929853 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929886 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.929909 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.931225 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.938608 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.948038 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.948074 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.966307 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnz78\" (UniqueName: \"kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78\") pod \"barbican-worker-7bcffbc9c7-qbr72\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.992753 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"]
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.995277 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:25 crc kubenswrapper[5081]: I1003 15:49:25.998363 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.013244 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"]
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.031671 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.031730 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.031832 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgfnx\" (UniqueName: \"kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032069 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032119 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032240 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032285 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsdl9\" (UniqueName: \"kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032357 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032835 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.032943 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.036264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.036417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.049911 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgfnx\" (UniqueName: \"kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.050353 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle\") pod \"barbican-keystone-listener-5f7bd66f74-sbcrq\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") " pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.087975 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bcffbc9c7-qbr72"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.134378 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.134444 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.134493 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s745\" (UniqueName: \"kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.134846 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.134930 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135007 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135047 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135102 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsdl9\" (UniqueName: \"kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135123 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135147 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135253 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.135503 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.136997 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.137426 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.137537 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.137981 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.152468 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsdl9\" (UniqueName: \"kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9\") pod \"dnsmasq-dns-5bf65f669-nhxgq\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.175828 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.211290 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.237053 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.237112 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.237151 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.238075 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.238519 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.238954 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s745\" (UniqueName: \"kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.244404 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.247066 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.247521 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f"
Oct 03 15:49:26 crc kubenswrapper[5081]:
I1003 15:49:26.255660 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s745\" (UniqueName: \"kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745\") pod \"barbican-api-99d7cb8b8-rl52f\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.334714 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.380896 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"] Oct 03 15:49:26 crc kubenswrapper[5081]: W1003 15:49:26.401505 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf64376f5_c10d_45cb_a9eb_81d9a4cd8b19.slice/crio-97b8550bf479f277c469071e93c069ed04e84c9543820f2a58f6917640c84441 WatchSource:0}: Error finding container 97b8550bf479f277c469071e93c069ed04e84c9543820f2a58f6917640c84441: Status 404 returned error can't find the container with id 97b8550bf479f277c469071e93c069ed04e84c9543820f2a58f6917640c84441 Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523403 5081 generic.go:334] "Generic (PLEG): container finished" podID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerID="a97318e6854d3447afbe240b9fac493d55de58a6a17c4282a1cae5fecfab39b5" exitCode=0 Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523447 5081 generic.go:334] "Generic (PLEG): container finished" podID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerID="26d92e5326c776c8b8b24981a7522bcd3c972ca4f4887bb5abb550c2c3acb072" exitCode=2 Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523459 5081 generic.go:334] "Generic (PLEG): container finished" podID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerID="5a5653bb42b303da200835207bfbab66727da8afe29a91910c498bea52c7d5c6" exitCode=0 Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523513 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerDied","Data":"a97318e6854d3447afbe240b9fac493d55de58a6a17c4282a1cae5fecfab39b5"} Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523571 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerDied","Data":"26d92e5326c776c8b8b24981a7522bcd3c972ca4f4887bb5abb550c2c3acb072"} Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.523588 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerDied","Data":"5a5653bb42b303da200835207bfbab66727da8afe29a91910c498bea52c7d5c6"} Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.527531 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerStarted","Data":"97b8550bf479f277c469071e93c069ed04e84c9543820f2a58f6917640c84441"} Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.687361 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"] Oct 03 15:49:26 crc kubenswrapper[5081]: W1003 15:49:26.705392 5081 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod681f8c15_9cbf_4416_83c4_36429c38a18d.slice/crio-186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1 WatchSource:0}: Error finding container 186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1: Status 404 returned error can't find the container with id 186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1 Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.900499 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"] Oct 03 15:49:26 crc kubenswrapper[5081]: I1003 15:49:26.942916 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"] Oct 03 15:49:26 crc kubenswrapper[5081]: W1003 15:49:26.984536 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20450407_5a43_40e1_979d_49644437a4d2.slice/crio-1145bd0d58b4d024fa872e4015bda6677867d44981a5bb04e44ec081ffec3e1a WatchSource:0}: Error finding container 1145bd0d58b4d024fa872e4015bda6677867d44981a5bb04e44ec081ffec3e1a: Status 404 returned error can't find the container with id 1145bd0d58b4d024fa872e4015bda6677867d44981a5bb04e44ec081ffec3e1a Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.262199 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.364056 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfmlf\" (UniqueName: \"kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf\") pod \"7456396f-1779-43c3-9a7c-888c42e64a52\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.364455 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config\") pod \"7456396f-1779-43c3-9a7c-888c42e64a52\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.364613 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle\") pod \"7456396f-1779-43c3-9a7c-888c42e64a52\" (UID: \"7456396f-1779-43c3-9a7c-888c42e64a52\") " Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.371109 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf" (OuterVolumeSpecName: "kube-api-access-dfmlf") pod "7456396f-1779-43c3-9a7c-888c42e64a52" (UID: "7456396f-1779-43c3-9a7c-888c42e64a52"). InnerVolumeSpecName "kube-api-access-dfmlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.395644 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7456396f-1779-43c3-9a7c-888c42e64a52" (UID: "7456396f-1779-43c3-9a7c-888c42e64a52"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.396932 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config" (OuterVolumeSpecName: "config") pod "7456396f-1779-43c3-9a7c-888c42e64a52" (UID: "7456396f-1779-43c3-9a7c-888c42e64a52"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.467058 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfmlf\" (UniqueName: \"kubernetes.io/projected/7456396f-1779-43c3-9a7c-888c42e64a52-kube-api-access-dfmlf\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.467092 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.467102 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7456396f-1779-43c3-9a7c-888c42e64a52-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.539751 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerStarted","Data":"c2862a88defca7a2804a5ded252516a22157e843439adf1ae5ffa1364fdd8421"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.539789 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerStarted","Data":"1145bd0d58b4d024fa872e4015bda6677867d44981a5bb04e44ec081ffec3e1a"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.541294 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mlcdv" event={"ID":"7456396f-1779-43c3-9a7c-888c42e64a52","Type":"ContainerDied","Data":"06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.541318 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06adf16d049c4c14632b86dc43ae50979e43cb0c754ba60ef77b52d93746e962" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.541361 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mlcdv" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.549859 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerStarted","Data":"186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.552279 5081 generic.go:334] "Generic (PLEG): container finished" podID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerID="a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e" exitCode=0 Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.552337 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" event={"ID":"b85f962b-8153-42a3-90fe-33e1d52fd292","Type":"ContainerDied","Data":"a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.552365 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" event={"ID":"b85f962b-8153-42a3-90fe-33e1d52fd292","Type":"ContainerStarted","Data":"a4168960260f6131f99a11235647f6674bdf0009a0d76866a5c7d0daa260c67b"} Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.710944 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"] Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.739060 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"] Oct 03 15:49:27 crc kubenswrapper[5081]: E1003 15:49:27.739454 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7456396f-1779-43c3-9a7c-888c42e64a52" containerName="neutron-db-sync" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.739465 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7456396f-1779-43c3-9a7c-888c42e64a52" containerName="neutron-db-sync" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.739917 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7456396f-1779-43c3-9a7c-888c42e64a52" containerName="neutron-db-sync" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.740936 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.759905 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"] Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.874653 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.874699 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.874744 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.874822 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcktv\" (UniqueName: \"kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.874846 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.875202 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.964814 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.966313 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.971093 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.971282 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.971694 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.971911 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-n4qpv" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976736 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976781 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976824 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976884 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcktv\" (UniqueName: \"kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976908 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.976997 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.978175 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.980728 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.984312 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.985835 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.987305 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:27 crc kubenswrapper[5081]: I1003 15:49:27.993014 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.011740 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcktv\" (UniqueName: \"kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv\") pod \"dnsmasq-dns-7668c8f757-vl5hz\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.078083 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.078143 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.078183 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.078211 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbqrd\" (UniqueName: \"kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 
crc kubenswrapper[5081]: I1003 15:49:28.078525 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.103714 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.181262 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.181341 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.181396 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.181429 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbqrd\" (UniqueName: \"kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.181513 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.194113 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.197222 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.197498 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " 
pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.203829 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.214829 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbqrd\" (UniqueName: \"kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd\") pod \"neutron-5bdd85c564-j4t4w\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.297364 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.607741 5081 generic.go:334] "Generic (PLEG): container finished" podID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerID="4bc7fd788ed88f62710523d7d0f1940f62dcfc74674d112b0356dec51458333b" exitCode=0 Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.607803 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerDied","Data":"4bc7fd788ed88f62710523d7d0f1940f62dcfc74674d112b0356dec51458333b"} Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.743377 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816007 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816110 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816199 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkjms\" (UniqueName: \"kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816256 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816312 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816372 5081 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.816432 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts\") pod \"f5431832-6fed-4beb-94b5-f2c1c4917d49\" (UID: \"f5431832-6fed-4beb-94b5-f2c1c4917d49\") " Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.833537 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.843726 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.857701 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts" (OuterVolumeSpecName: "scripts") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.864496 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms" (OuterVolumeSpecName: "kube-api-access-bkjms") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "kube-api-access-bkjms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.919984 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.920022 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkjms\" (UniqueName: \"kubernetes.io/projected/f5431832-6fed-4beb-94b5-f2c1c4917d49-kube-api-access-bkjms\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.920032 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5431832-6fed-4beb-94b5-f2c1c4917d49-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.920040 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:28 crc kubenswrapper[5081]: W1003 15:49:28.945989 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c5f576d_1c88_44ef_929c_44740b49afbd.slice/crio-4544bf55b36e0528863e47b61c3ee98981e39d83a74c43e7bb0df46cba7739f2 WatchSource:0}: Error finding container 4544bf55b36e0528863e47b61c3ee98981e39d83a74c43e7bb0df46cba7739f2: Status 404 returned error can't find the container with id 4544bf55b36e0528863e47b61c3ee98981e39d83a74c43e7bb0df46cba7739f2 Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.984690 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:28 crc kubenswrapper[5081]: I1003 15:49:28.991057 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.022840 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.022871 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.027857 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"] Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.060656 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data" (OuterVolumeSpecName: "config-data") pod "f5431832-6fed-4beb-94b5-f2c1c4917d49" (UID: "f5431832-6fed-4beb-94b5-f2c1c4917d49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.126178 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5431832-6fed-4beb-94b5-f2c1c4917d49-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.146108 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.616476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" event={"ID":"2c5f576d-1c88-44ef-929c-44740b49afbd","Type":"ContainerStarted","Data":"4544bf55b36e0528863e47b61c3ee98981e39d83a74c43e7bb0df46cba7739f2"} Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.620518 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.620624 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5431832-6fed-4beb-94b5-f2c1c4917d49","Type":"ContainerDied","Data":"5a49898093a6b654d7e4bc7c225a759f030411537077d385bf5892d16cb9e569"} Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.620702 5081 scope.go:117] "RemoveContainer" containerID="a97318e6854d3447afbe240b9fac493d55de58a6a17c4282a1cae5fecfab39b5" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.623240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerStarted","Data":"2e297fbe59922dcce998b5d34e87d306ba71634a97f5870a5f775af6d0e8ba09"} Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.624639 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerStarted","Data":"906ccbde6a35ad58c05707e9c452efd7be1d99e59d8e11ad5a949f9c5ccf4b48"} Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.647382 5081 scope.go:117] "RemoveContainer" containerID="26d92e5326c776c8b8b24981a7522bcd3c972ca4f4887bb5abb550c2c3acb072" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.672339 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.693796 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698164 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:49:29 crc kubenswrapper[5081]: E1003 15:49:29.698655 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="sg-core" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698683 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="sg-core" Oct 03 15:49:29 crc kubenswrapper[5081]: E1003 15:49:29.698719 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-central-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698728 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-central-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: E1003 15:49:29.698744 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-notification-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698753 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-notification-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: E1003 15:49:29.698767 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="proxy-httpd" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698774 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="proxy-httpd" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698963 5081 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="proxy-httpd" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698974 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-notification-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.698983 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="sg-core" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.699002 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" containerName="ceilometer-central-agent" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.700815 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.706908 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.708213 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.718493 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741086 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741153 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741187 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741324 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741342 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdc74\" (UniqueName: \"kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") 
" pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.741367 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.839218 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5431832-6fed-4beb-94b5-f2c1c4917d49" path="/var/lib/kubelet/pods/f5431832-6fed-4beb-94b5-f2c1c4917d49/volumes" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843361 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843429 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843456 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdc74\" (UniqueName: \"kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843544 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843617 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843654 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.843908 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.844151 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.848522 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.857510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.859963 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.860456 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:29 crc kubenswrapper[5081]: I1003 15:49:29.862867 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdc74\" (UniqueName: \"kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74\") pod \"ceilometer-0\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") " pod="openstack/ceilometer-0" Oct 03 15:49:30 crc kubenswrapper[5081]: I1003 15:49:30.090545 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.242849 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"] Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.245093 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.247131 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.247832 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.253753 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"] Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.298541 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.298791 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.298867 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.298972 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.299076 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.299157 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv59w\" (UniqueName: \"kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.299242 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400752 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400841 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400881 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv59w\" (UniqueName: \"kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400907 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400978 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.400994 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.401017 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.406897 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.408602 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.409511 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " 
pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.414132 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.414189 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.415844 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.428098 5081 scope.go:117] "RemoveContainer" containerID="4bc7fd788ed88f62710523d7d0f1940f62dcfc74674d112b0356dec51458333b" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.438176 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv59w\" (UniqueName: \"kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w\") pod \"neutron-5ff85fbc4f-f9zcx\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.554550 5081 scope.go:117] "RemoveContainer" containerID="5a5653bb42b303da200835207bfbab66727da8afe29a91910c498bea52c7d5c6" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.575239 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:31 crc kubenswrapper[5081]: I1003 15:49:31.892336 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.090993 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"] Oct 03 15:49:32 crc kubenswrapper[5081]: W1003 15:49:32.091788 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod264449b6_a64d_4d0b_a465_616fa49b3eca.slice/crio-89dc724fc8899b69f08b99987a7aed7d102c8072ed2e84563fffdc198c3b01f2 WatchSource:0}: Error finding container 89dc724fc8899b69f08b99987a7aed7d102c8072ed2e84563fffdc198c3b01f2: Status 404 returned error can't find the container with id 89dc724fc8899b69f08b99987a7aed7d102c8072ed2e84563fffdc198c3b01f2 Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.484119 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"] Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.486525 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.489545 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.490270 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.503007 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"] Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.634766 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.634920 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.634992 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.635015 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.635101 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.635225 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhbkv\" (UniqueName: \"kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.635311 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.663819 5081 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" event={"ID":"b85f962b-8153-42a3-90fe-33e1d52fd292","Type":"ContainerStarted","Data":"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b"} Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.665392 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerStarted","Data":"36953df7b10b4ec648bbebd2790357614ca2274b43c09d610fcf75f1d543a569"} Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.666820 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerStarted","Data":"89dc724fc8899b69f08b99987a7aed7d102c8072ed2e84563fffdc198c3b01f2"} Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.737427 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.737910 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.737942 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.738007 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.738032 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.738053 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.738094 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhbkv\" (UniqueName: \"kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " 
pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.738335 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.743517 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.743966 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.744485 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.746374 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.748191 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.754865 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhbkv\" (UniqueName: \"kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv\") pod \"barbican-api-57cd5fc748-p2rdp\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:32 crc kubenswrapper[5081]: I1003 15:49:32.808347 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.256374 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"] Oct 03 15:49:33 crc kubenswrapper[5081]: W1003 15:49:33.459983 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49cb0be2_f988_48bc_afd9_bb4bd348de1f.slice/crio-104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9 WatchSource:0}: Error finding container 104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9: Status 404 returned error can't find the container with id 104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9 Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.676522 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerStarted","Data":"33b201c7a663b865369e6350c02ba3702348259b089686f798961ebe1336e7e1"} Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.683296 5081 generic.go:334] "Generic (PLEG): container finished" podID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerID="f2926773a6b74e6e44743ee127345174154bef47185f4e9eb7217e6fe6870005" exitCode=0 Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.683457 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" event={"ID":"2c5f576d-1c88-44ef-929c-44740b49afbd","Type":"ContainerDied","Data":"f2926773a6b74e6e44743ee127345174154bef47185f4e9eb7217e6fe6870005"} Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.686139 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerStarted","Data":"c7fd1d71f2d9ba417f029d604edbb305edd80c93b20a0706ffa0b1ed0e2b1efc"} Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.689883 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerStarted","Data":"104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9"} Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.689923 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.690134 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="dnsmasq-dns" containerID="cri-o://203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b" gracePeriod=10 Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.690268 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.690292 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.742445 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-99d7cb8b8-rl52f" podStartSLOduration=8.742419437 podStartE2EDuration="8.742419437s" podCreationTimestamp="2025-10-03 15:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:33.730939644 +0000 UTC m=+1292.696496257" watchObservedRunningTime="2025-10-03 15:49:33.742419437 +0000 UTC m=+1292.707976060" Oct 03 15:49:33 crc kubenswrapper[5081]: I1003 15:49:33.767233 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" podStartSLOduration=8.767207974 podStartE2EDuration="8.767207974s" podCreationTimestamp="2025-10-03 15:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:33.756763932 +0000 UTC m=+1292.722320545" watchObservedRunningTime="2025-10-03 15:49:33.767207974 +0000 UTC m=+1292.732764597" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.309448 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379208 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379270 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379373 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsdl9\" (UniqueName: \"kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379418 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379436 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.379502 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0\") pod \"b85f962b-8153-42a3-90fe-33e1d52fd292\" (UID: \"b85f962b-8153-42a3-90fe-33e1d52fd292\") " Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.386426 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9" (OuterVolumeSpecName: "kube-api-access-gsdl9") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "kube-api-access-gsdl9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.463316 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config" (OuterVolumeSpecName: "config") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.469225 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.477447 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.482820 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.482856 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.482868 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.482878 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsdl9\" (UniqueName: \"kubernetes.io/projected/b85f962b-8153-42a3-90fe-33e1d52fd292-kube-api-access-gsdl9\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.568171 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.582039 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b85f962b-8153-42a3-90fe-33e1d52fd292" (UID: "b85f962b-8153-42a3-90fe-33e1d52fd292"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.585840 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.585870 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b85f962b-8153-42a3-90fe-33e1d52fd292-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.712734 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerStarted","Data":"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.721659 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerStarted","Data":"424ab139179e1f0e89a94103edf7bef82522d7b08ab2d180de8ddb2ba8927c6e"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.729325 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerStarted","Data":"47ef3c60108408db9a1b4be2ea0e08788da413b7ecb03dd036cc7204812d8491"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.745001 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerStarted","Data":"85943a8df529b81061f58996a21c0eeb8ce082102c43f331de24240cbc8080c0"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.759214 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerStarted","Data":"c75af55c799a16c6d3fb1fcae9ca1ebc16c7f7ce2f3b26cb7521040390a192ad"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.760250 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.762906 5081 generic.go:334] "Generic (PLEG): container finished" podID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerID="203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b" exitCode=0 Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.762971 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" event={"ID":"b85f962b-8153-42a3-90fe-33e1d52fd292","Type":"ContainerDied","Data":"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.763011 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" event={"ID":"b85f962b-8153-42a3-90fe-33e1d52fd292","Type":"ContainerDied","Data":"a4168960260f6131f99a11235647f6674bdf0009a0d76866a5c7d0daa260c67b"} Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.763034 5081 scope.go:117] "RemoveContainer" containerID="203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.763165 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf65f669-nhxgq" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.789167 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5ff85fbc4f-f9zcx" podStartSLOduration=3.789150518 podStartE2EDuration="3.789150518s" podCreationTimestamp="2025-10-03 15:49:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:34.783471003 +0000 UTC m=+1293.749027616" watchObservedRunningTime="2025-10-03 15:49:34.789150518 +0000 UTC m=+1293.754707131" Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.835648 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"] Oct 03 15:49:34 crc kubenswrapper[5081]: I1003 15:49:34.843928 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf65f669-nhxgq"] Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.669915 5081 scope.go:117] "RemoveContainer" containerID="a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.695313 5081 scope.go:117] "RemoveContainer" containerID="203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b" Oct 03 15:49:35 crc kubenswrapper[5081]: E1003 15:49:35.695879 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b\": container with ID starting with 203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b not found: ID does not exist" containerID="203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.695930 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b"} err="failed to get container status \"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b\": rpc error: code = NotFound desc = could not find container \"203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b\": container with ID starting with 203cb8cdd190b9da0e40875f3e7cadc6cd065016616dfd6f7feca2b9199eb28b not found: ID does not exist" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.695958 5081 scope.go:117] "RemoveContainer" containerID="a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e" Oct 03 15:49:35 crc kubenswrapper[5081]: E1003 15:49:35.696260 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e\": container with ID starting with a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e not found: ID does not exist" containerID="a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.696293 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e"} err="failed to get container status \"a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e\": rpc error: code = NotFound desc = could not find container \"a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e\": container with ID starting with 
a3e7450066c08345d13dca304bafa2fe845358ced43c4db56db49a386e67c00e not found: ID does not exist" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.777055 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" event={"ID":"2c5f576d-1c88-44ef-929c-44740b49afbd","Type":"ContainerStarted","Data":"ec17f8ead412a8a955287bcb856d88f24ab90b24f1df88e67d002ae01652dd13"} Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.777077 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.777136 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.842073 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" path="/var/lib/kubelet/pods/b85f962b-8153-42a3-90fe-33e1d52fd292/volumes" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.886165 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-99d7cb8b8-rl52f" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.900459 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:35 crc kubenswrapper[5081]: I1003 15:49:35.900918 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.017350 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-58db4df78-q9st4" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.791360 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerStarted","Data":"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3"} Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.792174 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.793102 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerStarted","Data":"9764cd3101ebf1f837d65e3b2926078df427b405bf605c5170fe2cd8a77e323b"} Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.795769 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pnxvc" event={"ID":"680afad9-2798-42bd-a115-81c10c66662a","Type":"ContainerStarted","Data":"43de74b5485509f34e4471be5f662ab53707b407c18cca90d50f8b325a9af06d"} Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.795805 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.890423 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-pnxvc" podStartSLOduration=4.4176784829999995 podStartE2EDuration="37.890402224s" podCreationTimestamp="2025-10-03 15:48:59 +0000 UTC" firstStartedPulling="2025-10-03 15:49:00.123192415 +0000 UTC m=+1259.088749028" lastFinishedPulling="2025-10-03 15:49:33.595916156 +0000 UTC 
m=+1292.561472769" observedRunningTime="2025-10-03 15:49:36.888953322 +0000 UTC m=+1295.854509935" watchObservedRunningTime="2025-10-03 15:49:36.890402224 +0000 UTC m=+1295.855958837" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.890595 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-57cd5fc748-p2rdp" podStartSLOduration=4.89058915 podStartE2EDuration="4.89058915s" podCreationTimestamp="2025-10-03 15:49:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:36.870124137 +0000 UTC m=+1295.835680740" watchObservedRunningTime="2025-10-03 15:49:36.89058915 +0000 UTC m=+1295.856145773" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.935983 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5bdd85c564-j4t4w" podStartSLOduration=9.935960223 podStartE2EDuration="9.935960223s" podCreationTimestamp="2025-10-03 15:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:36.910456125 +0000 UTC m=+1295.876012758" watchObservedRunningTime="2025-10-03 15:49:36.935960223 +0000 UTC m=+1295.901516836" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.940586 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" podStartSLOduration=9.940551486 podStartE2EDuration="9.940551486s" podCreationTimestamp="2025-10-03 15:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:36.931759892 +0000 UTC m=+1295.897316525" watchObservedRunningTime="2025-10-03 15:49:36.940551486 +0000 UTC m=+1295.906108119" Oct 03 15:49:36 crc kubenswrapper[5081]: I1003 15:49:36.959206 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" podStartSLOduration=4.897773981 podStartE2EDuration="11.959176335s" podCreationTimestamp="2025-10-03 15:49:25 +0000 UTC" firstStartedPulling="2025-10-03 15:49:26.410655266 +0000 UTC m=+1285.376211879" lastFinishedPulling="2025-10-03 15:49:33.47205762 +0000 UTC m=+1292.437614233" observedRunningTime="2025-10-03 15:49:36.955493899 +0000 UTC m=+1295.921050512" watchObservedRunningTime="2025-10-03 15:49:36.959176335 +0000 UTC m=+1295.924732948" Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.808898 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.812797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerStarted","Data":"1fe8dead4ffd55edc421a9224345f21f357585af666bad8201bd619bfd771d49"} Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.813144 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerStarted","Data":"bff46afe4737f638169c5f91acfe8c9e72fdce201b79540cce253c33f242566e"} Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.818900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" 
event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerStarted","Data":"9791a255062a856210f6f6d869def3d35e782763a70a0b45636ea36481f99787"} Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.818980 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerStarted","Data":"5edccb45492f37a61a761bda119802fecb308fa81cec1b916185a47045d64830"} Oct 03 15:49:37 crc kubenswrapper[5081]: I1003 15:49:37.852003 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" podStartSLOduration=2.789055239 podStartE2EDuration="12.851983071s" podCreationTimestamp="2025-10-03 15:49:25 +0000 UTC" firstStartedPulling="2025-10-03 15:49:26.708536369 +0000 UTC m=+1285.674092992" lastFinishedPulling="2025-10-03 15:49:36.771464211 +0000 UTC m=+1295.737020824" observedRunningTime="2025-10-03 15:49:37.842167756 +0000 UTC m=+1296.807724369" watchObservedRunningTime="2025-10-03 15:49:37.851983071 +0000 UTC m=+1296.817539684" Oct 03 15:49:38 crc kubenswrapper[5081]: I1003 15:49:38.050488 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:39 crc kubenswrapper[5081]: I1003 15:49:39.095022 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7f5c54b599-s8jwr" Oct 03 15:49:39 crc kubenswrapper[5081]: I1003 15:49:39.839442 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerStarted","Data":"49e16e0d55d073e679531678673b03aee279eebb24d59478d4621ebbc075545f"} Oct 03 15:49:39 crc kubenswrapper[5081]: I1003 15:49:39.839496 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 15:49:39 crc kubenswrapper[5081]: I1003 15:49:39.857325 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.5058143 podStartE2EDuration="10.8573032s" podCreationTimestamp="2025-10-03 15:49:29 +0000 UTC" firstStartedPulling="2025-10-03 15:49:31.907950413 +0000 UTC m=+1290.873507046" lastFinishedPulling="2025-10-03 15:49:39.259439333 +0000 UTC m=+1298.224995946" observedRunningTime="2025-10-03 15:49:39.855853258 +0000 UTC m=+1298.821409871" watchObservedRunningTime="2025-10-03 15:49:39.8573032 +0000 UTC m=+1298.822859823" Oct 03 15:49:40 crc kubenswrapper[5081]: I1003 15:49:40.424727 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:41 crc kubenswrapper[5081]: I1003 15:49:41.863225 5081 generic.go:334] "Generic (PLEG): container finished" podID="680afad9-2798-42bd-a115-81c10c66662a" containerID="43de74b5485509f34e4471be5f662ab53707b407c18cca90d50f8b325a9af06d" exitCode=0 Oct 03 15:49:41 crc kubenswrapper[5081]: I1003 15:49:41.863311 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pnxvc" event={"ID":"680afad9-2798-42bd-a115-81c10c66662a","Type":"ContainerDied","Data":"43de74b5485509f34e4471be5f662ab53707b407c18cca90d50f8b325a9af06d"} Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.080803 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.151687 5081 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"] Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.151962 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-99d7cb8b8-rl52f" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api-log" containerID="cri-o://c2862a88defca7a2804a5ded252516a22157e843439adf1ae5ffa1364fdd8421" gracePeriod=30 Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.152450 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-99d7cb8b8-rl52f" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api" containerID="cri-o://2e297fbe59922dcce998b5d34e87d306ba71634a97f5870a5f775af6d0e8ba09" gracePeriod=30 Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.873574 5081 generic.go:334] "Generic (PLEG): container finished" podID="20450407-5a43-40e1-979d-49644437a4d2" containerID="c2862a88defca7a2804a5ded252516a22157e843439adf1ae5ffa1364fdd8421" exitCode=143 Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.873781 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerDied","Data":"c2862a88defca7a2804a5ded252516a22157e843439adf1ae5ffa1364fdd8421"} Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.952633 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 15:49:42 crc kubenswrapper[5081]: E1003 15:49:42.953205 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="dnsmasq-dns" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.953232 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="dnsmasq-dns" Oct 03 15:49:42 crc kubenswrapper[5081]: E1003 15:49:42.953262 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="init" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.953271 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="init" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.953511 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b85f962b-8153-42a3-90fe-33e1d52fd292" containerName="dnsmasq-dns" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.954335 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.957362 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.958505 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.960808 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 15:49:42 crc kubenswrapper[5081]: I1003 15:49:42.967031 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-lmj7w" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.066767 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-767dh\" (UniqueName: \"kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.067192 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.067267 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.067317 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.105746 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.169909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.169971 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.170005 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config\") pod \"openstackclient\" (UID: 
\"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.170056 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-767dh\" (UniqueName: \"kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.182426 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.189972 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.199161 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.210606 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-767dh\" (UniqueName: \"kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh\") pod \"openstackclient\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") " pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.221631 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.221909 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="dnsmasq-dns" containerID="cri-o://027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c" gracePeriod=10 Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.274011 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.527733 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698382 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698461 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698523 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698610 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wb5m9\" (UniqueName: \"kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698642 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698712 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.698748 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts\") pod \"680afad9-2798-42bd-a115-81c10c66662a\" (UID: \"680afad9-2798-42bd-a115-81c10c66662a\") " Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.699252 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/680afad9-2798-42bd-a115-81c10c66662a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.713523 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9" (OuterVolumeSpecName: "kube-api-access-wb5m9") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "kube-api-access-wb5m9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.715324 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.750903 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts" (OuterVolumeSpecName: "scripts") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.775470 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.804937 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wb5m9\" (UniqueName: \"kubernetes.io/projected/680afad9-2798-42bd-a115-81c10c66662a-kube-api-access-wb5m9\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.804980 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.804993 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.805006 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.840521 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data" (OuterVolumeSpecName: "config-data") pod "680afad9-2798-42bd-a115-81c10c66662a" (UID: "680afad9-2798-42bd-a115-81c10c66662a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.907842 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.910693 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680afad9-2798-42bd-a115-81c10c66662a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.910699 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pnxvc" event={"ID":"680afad9-2798-42bd-a115-81c10c66662a","Type":"ContainerDied","Data":"259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16"} Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.910728 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pnxvc" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.910740 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="259faedf03908632ae4f23c432d6578689252b4a8197c64eb9cc053c801a2c16" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.928798 5081 generic.go:334] "Generic (PLEG): container finished" podID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerID="027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c" exitCode=0 Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.929084 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" event={"ID":"d39b47d6-5c20-46d7-8a31-65605a26ceb3","Type":"ContainerDied","Data":"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c"} Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.929110 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" event={"ID":"d39b47d6-5c20-46d7-8a31-65605a26ceb3","Type":"ContainerDied","Data":"8066050777aa2f1f7d5dbde0aafb6b883a758f0b0546e4ce68a5a3f99ee21c91"} Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.929132 5081 scope.go:117] "RemoveContainer" containerID="027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.929282 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55b4c6976c-nbgt8" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.961760 5081 scope.go:117] "RemoveContainer" containerID="e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.986455 5081 scope.go:117] "RemoveContainer" containerID="027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c" Oct 03 15:49:43 crc kubenswrapper[5081]: E1003 15:49:43.990127 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c\": container with ID starting with 027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c not found: ID does not exist" containerID="027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.990159 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c"} err="failed to get container status \"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c\": rpc error: code = NotFound desc = could not find container \"027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c\": container with ID starting with 027afc356c0356775cf99becc11280cc6b2d7f815e7b9bad61fda144de50410c not found: ID does not exist" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.990181 5081 scope.go:117] "RemoveContainer" containerID="e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f" Oct 03 15:49:43 crc kubenswrapper[5081]: E1003 15:49:43.990698 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f\": container with ID starting with e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f not found: ID does not exist" containerID="e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f" Oct 03 15:49:43 crc kubenswrapper[5081]: I1003 15:49:43.990729 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f"} err="failed to get container status \"e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f\": rpc error: code = NotFound desc = could not find container \"e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f\": container with ID starting with e10d8b6243b7bfcdd9cefbd7162875642d13023c55d4cba3a0c7dba700ca7b8f not found: ID does not exist" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012246 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnwqz\" (UniqueName: \"kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012310 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012336 5081 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012382 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012407 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.012548 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb\") pod \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\" (UID: \"d39b47d6-5c20-46d7-8a31-65605a26ceb3\") " Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.018951 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz" (OuterVolumeSpecName: "kube-api-access-rnwqz") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "kube-api-access-rnwqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.067833 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.074228 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.090501 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.093548 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config" (OuterVolumeSpecName: "config") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.096001 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.123612 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnwqz\" (UniqueName: \"kubernetes.io/projected/d39b47d6-5c20-46d7-8a31-65605a26ceb3-kube-api-access-rnwqz\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.123644 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.123655 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.123663 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.123852 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.132268 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d39b47d6-5c20-46d7-8a31-65605a26ceb3" (UID: "d39b47d6-5c20-46d7-8a31-65605a26ceb3"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.194874 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:49:44 crc kubenswrapper[5081]: E1003 15:49:44.195296 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="init" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.195316 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="init" Oct 03 15:49:44 crc kubenswrapper[5081]: E1003 15:49:44.195333 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680afad9-2798-42bd-a115-81c10c66662a" containerName="cinder-db-sync" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.195339 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="680afad9-2798-42bd-a115-81c10c66662a" containerName="cinder-db-sync" Oct 03 15:49:44 crc kubenswrapper[5081]: E1003 15:49:44.195360 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="dnsmasq-dns" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.195366 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="dnsmasq-dns" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.195571 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" containerName="dnsmasq-dns" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.195596 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="680afad9-2798-42bd-a115-81c10c66662a" containerName="cinder-db-sync" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.196682 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.205823 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.206500 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.206754 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.207011 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-psb4d" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.207697 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.228013 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d39b47d6-5c20-46d7-8a31-65605a26ceb3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.283799 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.299674 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.301359 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.313625 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55b4c6976c-nbgt8"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.328007 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329011 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329042 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329073 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329139 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329170 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.329189 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nprnt\" (UniqueName: \"kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.393751 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.395253 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.411603 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.416110 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431414 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nprnt\" (UniqueName: \"kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431513 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2xqb\" (UniqueName: \"kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431572 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431643 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431670 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431719 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431749 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431773 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431831 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.431865 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.432675 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.440112 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.450173 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.451640 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.461173 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.471064 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nprnt\" (UniqueName: 
\"kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt\") pod \"cinder-scheduler-0\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.529009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.535311 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.535368 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.535506 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2xqb\" (UniqueName: \"kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.535540 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536125 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536249 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536329 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536375 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536403 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk75n\" (UniqueName: \"kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536476 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536508 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.536598 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.537314 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.537817 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.538029 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.538255 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc\") pod 
\"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.559280 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2xqb\" (UniqueName: \"kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb\") pod \"dnsmasq-dns-77d8d5886f-sw2hj\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638233 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638323 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638345 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk75n\" (UniqueName: \"kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638368 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638385 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638405 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638436 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.638807 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.639050 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.642483 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.644285 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.644738 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.647230 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.667015 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk75n\" (UniqueName: \"kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n\") pod \"cinder-api-0\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") " pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.695853 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.725809 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.979943 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:49:44 crc kubenswrapper[5081]: I1003 15:49:44.992686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"79acb98c-21e6-4bad-ad5b-4ebc855e6378","Type":"ContainerStarted","Data":"778b053996e6111c2b4d5fd032383b7a6fec14aeb98afaddf9c1224cbdbafe8a"} Oct 03 15:49:45 crc kubenswrapper[5081]: I1003 15:49:45.314486 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:49:45 crc kubenswrapper[5081]: I1003 15:49:45.344096 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"] Oct 03 15:49:45 crc kubenswrapper[5081]: I1003 15:49:45.875634 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d39b47d6-5c20-46d7-8a31-65605a26ceb3" path="/var/lib/kubelet/pods/d39b47d6-5c20-46d7-8a31-65605a26ceb3/volumes" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.008728 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerStarted","Data":"f4d025a8f515d9a8d7c2dc14281a3ea635e2b0369b301609e51bc5e01ada065b"} Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.010483 5081 generic.go:334] "Generic (PLEG): container finished" podID="20450407-5a43-40e1-979d-49644437a4d2" containerID="2e297fbe59922dcce998b5d34e87d306ba71634a97f5870a5f775af6d0e8ba09" exitCode=0 Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.010539 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerDied","Data":"2e297fbe59922dcce998b5d34e87d306ba71634a97f5870a5f775af6d0e8ba09"} Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.026221 5081 generic.go:334] "Generic (PLEG): container finished" podID="32b06890-1db0-4586-8127-0be88c3d6e42" containerID="63f8fcb4ff665dedcfbfc051a978fa3194e0487aaf89da56acd023add4fdef5d" exitCode=0 Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.026333 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" event={"ID":"32b06890-1db0-4586-8127-0be88c3d6e42","Type":"ContainerDied","Data":"63f8fcb4ff665dedcfbfc051a978fa3194e0487aaf89da56acd023add4fdef5d"} Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.026366 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" event={"ID":"32b06890-1db0-4586-8127-0be88c3d6e42","Type":"ContainerStarted","Data":"f6fe3e07305c0a00b0ab846cc0a379ddf7bda20d22ad2961d31649f3733e2b26"} Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.037487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerStarted","Data":"b1b4031a8788c812e1889760f0fb56a2244abe08e61555e62cceab2d218ae039"} Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.078999 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.217947 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom\") pod \"20450407-5a43-40e1-979d-49644437a4d2\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.218180 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle\") pod \"20450407-5a43-40e1-979d-49644437a4d2\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.218281 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s745\" (UniqueName: \"kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745\") pod \"20450407-5a43-40e1-979d-49644437a4d2\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.218342 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data\") pod \"20450407-5a43-40e1-979d-49644437a4d2\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.218403 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs\") pod \"20450407-5a43-40e1-979d-49644437a4d2\" (UID: \"20450407-5a43-40e1-979d-49644437a4d2\") " Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.218903 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs" (OuterVolumeSpecName: "logs") pod "20450407-5a43-40e1-979d-49644437a4d2" (UID: "20450407-5a43-40e1-979d-49644437a4d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.223389 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745" (OuterVolumeSpecName: "kube-api-access-2s745") pod "20450407-5a43-40e1-979d-49644437a4d2" (UID: "20450407-5a43-40e1-979d-49644437a4d2"). InnerVolumeSpecName "kube-api-access-2s745". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.225297 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20450407-5a43-40e1-979d-49644437a4d2-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.225325 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s745\" (UniqueName: \"kubernetes.io/projected/20450407-5a43-40e1-979d-49644437a4d2-kube-api-access-2s745\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.229751 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "20450407-5a43-40e1-979d-49644437a4d2" (UID: "20450407-5a43-40e1-979d-49644437a4d2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.270730 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20450407-5a43-40e1-979d-49644437a4d2" (UID: "20450407-5a43-40e1-979d-49644437a4d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.294392 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data" (OuterVolumeSpecName: "config-data") pod "20450407-5a43-40e1-979d-49644437a4d2" (UID: "20450407-5a43-40e1-979d-49644437a4d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.328227 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.328275 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.328288 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20450407-5a43-40e1-979d-49644437a4d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:49:46 crc kubenswrapper[5081]: I1003 15:49:46.554492 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.051955 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerStarted","Data":"9729a8a571631ca0e31dca44ceef1da98536b4e057024fac6b4c395ec68fb2ae"} Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.063463 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerStarted","Data":"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"} Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.065801 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-99d7cb8b8-rl52f" Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.069394 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-99d7cb8b8-rl52f" event={"ID":"20450407-5a43-40e1-979d-49644437a4d2","Type":"ContainerDied","Data":"1145bd0d58b4d024fa872e4015bda6677867d44981a5bb04e44ec081ffec3e1a"} Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.069451 5081 scope.go:117] "RemoveContainer" containerID="2e297fbe59922dcce998b5d34e87d306ba71634a97f5870a5f775af6d0e8ba09" Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.089798 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" event={"ID":"32b06890-1db0-4586-8127-0be88c3d6e42","Type":"ContainerStarted","Data":"c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635"} Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.090678 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.144542 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" podStartSLOduration=3.14448189 podStartE2EDuration="3.14448189s" podCreationTimestamp="2025-10-03 15:49:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:47.131286358 +0000 UTC m=+1306.096842991" watchObservedRunningTime="2025-10-03 15:49:47.14448189 +0000 UTC m=+1306.110038503" Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.183619 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"] Oct 03 15:49:47 crc kubenswrapper[5081]: 
I1003 15:49:47.196193 5081 scope.go:117] "RemoveContainer" containerID="c2862a88defca7a2804a5ded252516a22157e843439adf1ae5ffa1364fdd8421" Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.206038 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-99d7cb8b8-rl52f"] Oct 03 15:49:47 crc kubenswrapper[5081]: I1003 15:49:47.840946 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20450407-5a43-40e1-979d-49644437a4d2" path="/var/lib/kubelet/pods/20450407-5a43-40e1-979d-49644437a4d2/volumes" Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.100424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerStarted","Data":"bd24bd28a5156095e6ca976586e332f4440bdc22de92cf9c6c89cf12ad2a08ad"} Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.104772 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerStarted","Data":"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"} Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.104851 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.104857 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api-log" containerID="cri-o://7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359" gracePeriod=30 Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.104877 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api" containerID="cri-o://bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf" gracePeriod=30 Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.126491 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.114981966 podStartE2EDuration="4.126475417s" podCreationTimestamp="2025-10-03 15:49:44 +0000 UTC" firstStartedPulling="2025-10-03 15:49:44.992294109 +0000 UTC m=+1303.957850722" lastFinishedPulling="2025-10-03 15:49:46.00378757 +0000 UTC m=+1304.969344173" observedRunningTime="2025-10-03 15:49:48.122461141 +0000 UTC m=+1307.088017754" watchObservedRunningTime="2025-10-03 15:49:48.126475417 +0000 UTC m=+1307.092032030" Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.150427 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.15040637 podStartE2EDuration="4.15040637s" podCreationTimestamp="2025-10-03 15:49:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:48.142859361 +0000 UTC m=+1307.108415984" watchObservedRunningTime="2025-10-03 15:49:48.15040637 +0000 UTC m=+1307.115962983" Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.786348 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884270 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884305 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk75n\" (UniqueName: \"kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884330 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884364 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884448 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884478 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884532 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts\") pod \"79415ed3-b30f-466b-bcfe-82f0fca48c68\" (UID: \"79415ed3-b30f-466b-bcfe-82f0fca48c68\") "
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.884683 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.885570 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs" (OuterVolumeSpecName: "logs") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.885808 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79415ed3-b30f-466b-bcfe-82f0fca48c68-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.885820 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79415ed3-b30f-466b-bcfe-82f0fca48c68-logs\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.891141 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts" (OuterVolumeSpecName: "scripts") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.894752 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.912137 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n" (OuterVolumeSpecName: "kube-api-access-bk75n") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "kube-api-access-bk75n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.948766 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data" (OuterVolumeSpecName: "config-data") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.954144 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79415ed3-b30f-466b-bcfe-82f0fca48c68" (UID: "79415ed3-b30f-466b-bcfe-82f0fca48c68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.987540 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.987593 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.987603 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.987613 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk75n\" (UniqueName: \"kubernetes.io/projected/79415ed3-b30f-466b-bcfe-82f0fca48c68-kube-api-access-bk75n\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:48 crc kubenswrapper[5081]: I1003 15:49:48.987623 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79415ed3-b30f-466b-bcfe-82f0fca48c68-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.120066 5081 generic.go:334] "Generic (PLEG): container finished" podID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerID="bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf" exitCode=0
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.120104 5081 generic.go:334] "Generic (PLEG): container finished" podID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerID="7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359" exitCode=143
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.132837 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerDied","Data":"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"}
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.132932 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerDied","Data":"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"}
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.132949 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"79415ed3-b30f-466b-bcfe-82f0fca48c68","Type":"ContainerDied","Data":"f4d025a8f515d9a8d7c2dc14281a3ea635e2b0369b301609e51bc5e01ada065b"}
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.132972 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.132973 5081 scope.go:117] "RemoveContainer" containerID="bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.186305 5081 scope.go:117] "RemoveContainer" containerID="7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.187435 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.203599 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.217995 5081 scope.go:117] "RemoveContainer" containerID="bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.226687 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf\": container with ID starting with bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf not found: ID does not exist" containerID="bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.226726 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"} err="failed to get container status \"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf\": rpc error: code = NotFound desc = could not find container \"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf\": container with ID starting with bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf not found: ID does not exist"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.226753 5081 scope.go:117] "RemoveContainer" containerID="7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.227842 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359\": container with ID starting with 7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359 not found: ID does not exist" containerID="7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.227895 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"} err="failed to get container status \"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359\": rpc error: code = NotFound desc = could not find container \"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359\": container with ID starting with 7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359 not found: ID does not exist"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.227923 5081 scope.go:117] "RemoveContainer" containerID="bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.231524 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf"} err="failed to get container status \"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf\": rpc error: code = NotFound desc = could not find container \"bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf\": container with ID starting with bfcd04d456c301c5de1b31fc74cd4e9ab6ade038055fe799650ae639ec94d3bf not found: ID does not exist"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.231551 5081 scope.go:117] "RemoveContainer" containerID="7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.231948 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359"} err="failed to get container status \"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359\": rpc error: code = NotFound desc = could not find container \"7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359\": container with ID starting with 7f9c42dcf1a5df5c11a353af081b47c5c210d23e903c9b6b551a79bb4963e359 not found: ID does not exist"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.245940 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.246358 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246374 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.246387 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246393 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.246416 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246421 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: E1003 15:49:49.246441 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246447 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246616 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246638 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246655 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="20450407-5a43-40e1-979d-49644437a4d2" containerName="barbican-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.246666 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" containerName="cinder-api-log"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.247607 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.250829 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.252277 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.255636 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.259232 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293192 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlvq\" (UniqueName: \"kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293248 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293274 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293331 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293403 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293500 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293516 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293554 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.293585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395404 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395495 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395543 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395574 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395603 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395621 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395669 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlvq\" (UniqueName: \"kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395688 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.395707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.396682 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.397336 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.404211 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.404332 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.404779 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.404919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.405061 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.406584 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.414401 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjlvq\" (UniqueName: \"kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq\") pod \"cinder-api-0\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") " pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.529806 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.531873 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"]
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.533990 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.537244 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.537543 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.538690 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.539715 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"]
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.580607 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599136 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599200 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599219 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599248 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5x4z\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599269 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599328 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599353 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.599395 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701046 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701161 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701190 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701247 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701295 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701337 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.701388 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5x4z\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.702181 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.705327 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.705790 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.709088 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.709372 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.711764 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.715681 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.726279 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5x4z\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z\") pod \"swift-proxy-7cc85979bf-f5kj9\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.841042 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79415ed3-b30f-466b-bcfe-82f0fca48c68" path="/var/lib/kubelet/pods/79415ed3-b30f-466b-bcfe-82f0fca48c68/volumes"
Oct 03 15:49:49 crc kubenswrapper[5081]: I1003 15:49:49.854206 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.003345 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.003617 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-central-agent" containerID="cri-o://85943a8df529b81061f58996a21c0eeb8ce082102c43f331de24240cbc8080c0" gracePeriod=30
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.004663 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-notification-agent" containerID="cri-o://bff46afe4737f638169c5f91acfe8c9e72fdce201b79540cce253c33f242566e" gracePeriod=30
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.004688 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="sg-core" containerID="cri-o://1fe8dead4ffd55edc421a9224345f21f357585af666bad8201bd619bfd771d49" gracePeriod=30
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.004871 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="proxy-httpd" containerID="cri-o://49e16e0d55d073e679531678673b03aee279eebb24d59478d4621ebbc075545f" gracePeriod=30
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.094355 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.110856 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": read tcp 10.217.0.2:52176->10.217.0.157:3000: read: connection reset by peer"
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.146780 5081 generic.go:334] "Generic (PLEG): container finished" podID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerID="1fe8dead4ffd55edc421a9224345f21f357585af666bad8201bd619bfd771d49" exitCode=2
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.146843 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerDied","Data":"1fe8dead4ffd55edc421a9224345f21f357585af666bad8201bd619bfd771d49"}
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.152899 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerStarted","Data":"6b8a6399c8657f3408bf3023cf043092593eaa0c540354e2d3c57d44064ccfb2"}
Oct 03 15:49:50 crc kubenswrapper[5081]: I1003 15:49:50.398619 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"]
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.189771 5081 generic.go:334] "Generic (PLEG): container finished" podID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerID="49e16e0d55d073e679531678673b03aee279eebb24d59478d4621ebbc075545f" exitCode=0
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.190175 5081 generic.go:334] "Generic (PLEG): container finished" podID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerID="bff46afe4737f638169c5f91acfe8c9e72fdce201b79540cce253c33f242566e" exitCode=0
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.189846 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerDied","Data":"49e16e0d55d073e679531678673b03aee279eebb24d59478d4621ebbc075545f"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.190235 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerDied","Data":"bff46afe4737f638169c5f91acfe8c9e72fdce201b79540cce253c33f242566e"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.190263 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerDied","Data":"85943a8df529b81061f58996a21c0eeb8ce082102c43f331de24240cbc8080c0"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.190189 5081 generic.go:334] "Generic (PLEG): container finished" podID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerID="85943a8df529b81061f58996a21c0eeb8ce082102c43f331de24240cbc8080c0" exitCode=0
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.194903 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerStarted","Data":"34c7f45d2c9cb180d7c08a560ddcfcf95e23f3fa8b56979e4ea9e41b424f888b"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.197116 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerStarted","Data":"b5c101a43a3295f40820a6e0bc4115d66a17ebbfd93458ef2f3e176a858daf89"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.197152 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerStarted","Data":"5f6d5178719cb62c16b047a9e06ac48b70916f8257a72246b0bf985d185298bd"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.197166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerStarted","Data":"89e7805c88e4fa8234af0fc9c412e45f150caef771e07e4862918d5fc3eff90b"}
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.197411 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.197437 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7cc85979bf-f5kj9"
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.864022 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7cc85979bf-f5kj9" podStartSLOduration=2.864001481 podStartE2EDuration="2.864001481s" podCreationTimestamp="2025-10-03 15:49:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:51.227255639 +0000 UTC m=+1310.192812282" watchObservedRunningTime="2025-10-03 15:49:51.864001481 +0000 UTC m=+1310.829558094"
Oct 03 15:49:51 crc kubenswrapper[5081]: I1003 15:49:51.963363 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064034 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064076 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064162 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdc74\" (UniqueName: \"kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064203 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064336 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064405 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064466 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml\") pod \"f082352a-1587-4e9f-95fa-baec60a1ee6a\" (UID: \"f082352a-1587-4e9f-95fa-baec60a1ee6a\") "
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.064898 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.065680 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.072694 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74" (OuterVolumeSpecName: "kube-api-access-hdc74") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "kube-api-access-hdc74". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.086784 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts" (OuterVolumeSpecName: "scripts") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.162083 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.167383 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.167422 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.167435 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdc74\" (UniqueName: \"kubernetes.io/projected/f082352a-1587-4e9f-95fa-baec60a1ee6a-kube-api-access-hdc74\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.167449 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f082352a-1587-4e9f-95fa-baec60a1ee6a-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.167459 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.218889 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f082352a-1587-4e9f-95fa-baec60a1ee6a","Type":"ContainerDied","Data":"36953df7b10b4ec648bbebd2790357614ca2274b43c09d610fcf75f1d543a569"}
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.219210 5081 scope.go:117] "RemoveContainer" containerID="49e16e0d55d073e679531678673b03aee279eebb24d59478d4621ebbc075545f"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.219522 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.229815 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerStarted","Data":"11222103517d49608c40c5db5a80c4fb0e6936aa3b9146f78928edb3436b5c6e"}
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.229871 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.271541 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data" (OuterVolumeSpecName: "config-data") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.282770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f082352a-1587-4e9f-95fa-baec60a1ee6a" (UID: "f082352a-1587-4e9f-95fa-baec60a1ee6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.286206 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.286165752 podStartE2EDuration="3.286165752s" podCreationTimestamp="2025-10-03 15:49:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:49:52.274713841 +0000 UTC m=+1311.240270454" watchObservedRunningTime="2025-10-03 15:49:52.286165752 +0000 UTC m=+1311.251722365"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.373511 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.373580 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f082352a-1587-4e9f-95fa-baec60a1ee6a-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.565617 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.579580 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.597952 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:52 crc kubenswrapper[5081]: E1003 15:49:52.598512 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="proxy-httpd"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598534 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="proxy-httpd"
Oct 03 15:49:52 crc kubenswrapper[5081]: E1003 15:49:52.598543 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="sg-core"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598551 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="sg-core"
Oct 03 15:49:52 crc kubenswrapper[5081]: E1003 15:49:52.598620 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-central-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598627 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-central-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: E1003 15:49:52.598658 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-notification-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598665 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-notification-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598831 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-notification-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598850 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="ceilometer-central-agent"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598866 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="proxy-httpd"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.598885 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" containerName="sg-core"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.600834 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.602984 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.603312 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.609053 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.678782 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdfcf\" (UniqueName: \"kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.678841 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.678925 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.678961 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.678990 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.679026 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.679080 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782123 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782170 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782228 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782321 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdfcf\" (UniqueName: \"kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782384 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782402 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.782843 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.783329 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.789180 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.809289 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.810214 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.821956 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.826354 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdfcf\" (UniqueName: \"kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf\") pod \"ceilometer-0\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " pod="openstack/ceilometer-0"
Oct 03 15:49:52 crc kubenswrapper[5081]: I1003 15:49:52.933921 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:49:53 crc kubenswrapper[5081]: I1003 15:49:53.840438 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f082352a-1587-4e9f-95fa-baec60a1ee6a" path="/var/lib/kubelet/pods/f082352a-1587-4e9f-95fa-baec60a1ee6a/volumes"
Oct 03 15:49:54 crc kubenswrapper[5081]: I1003 15:49:54.702896 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj"
Oct 03 15:49:54 crc kubenswrapper[5081]: I1003 15:49:54.765380 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"]
Oct 03 15:49:54 crc kubenswrapper[5081]: I1003 15:49:54.765646 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="dnsmasq-dns" containerID="cri-o://ec17f8ead412a8a955287bcb856d88f24ab90b24f1df88e67d002ae01652dd13" gracePeriod=10
Oct 03 15:49:54 crc kubenswrapper[5081]: I1003 15:49:54.791757 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 03 15:49:54 crc kubenswrapper[5081]: I1003 15:49:54.841022 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 03 15:49:55 crc kubenswrapper[5081]: I1003 15:49:55.253858 5081 generic.go:334] "Generic (PLEG): container finished" podID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerID="ec17f8ead412a8a955287bcb856d88f24ab90b24f1df88e67d002ae01652dd13" exitCode=0
Oct 03 15:49:55 crc kubenswrapper[5081]: I1003 15:49:55.254116 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="cinder-scheduler" containerID="cri-o://9729a8a571631ca0e31dca44ceef1da98536b4e057024fac6b4c395ec68fb2ae" gracePeriod=30
Oct 03 15:49:55 crc kubenswrapper[5081]: I1003 15:49:55.254225 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" event={"ID":"2c5f576d-1c88-44ef-929c-44740b49afbd","Type":"ContainerDied","Data":"ec17f8ead412a8a955287bcb856d88f24ab90b24f1df88e67d002ae01652dd13"}
Oct 03 15:49:55 crc kubenswrapper[5081]: I1003 15:49:55.254309 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="probe" containerID="cri-o://bd24bd28a5156095e6ca976586e332f4440bdc22de92cf9c6c89cf12ad2a08ad" gracePeriod=30
Oct 03 15:49:56 crc kubenswrapper[5081]: I1003 15:49:56.266500 5081 generic.go:334] "Generic (PLEG): container finished" podID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerID="bd24bd28a5156095e6ca976586e332f4440bdc22de92cf9c6c89cf12ad2a08ad" exitCode=0
Oct 03 15:49:56 crc kubenswrapper[5081]: I1003 15:49:56.266573 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerDied","Data":"bd24bd28a5156095e6ca976586e332f4440bdc22de92cf9c6c89cf12ad2a08ad"}
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.104692 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: connect: connection refused"
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.298501 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5bdd85c564-j4t4w"
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.318164 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5bdd85c564-j4t4w"
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.360601 5081 generic.go:334] "Generic (PLEG): container finished" podID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerID="9729a8a571631ca0e31dca44ceef1da98536b4e057024fac6b4c395ec68fb2ae" exitCode=0
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.360686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerDied","Data":"9729a8a571631ca0e31dca44ceef1da98536b4e057024fac6b4c395ec68fb2ae"}
Oct 03 15:49:58 crc kubenswrapper[5081]: I1003 15:49:58.813314 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:49:59 crc kubenswrapper[5081]: I1003 15:49:59.458244 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 15:49:59 crc kubenswrapper[5081]: I1003 15:49:59.458490 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-log" containerID="cri-o://6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45" gracePeriod=30
Oct 03 15:49:59 crc kubenswrapper[5081]: I1003 15:49:59.458603 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-httpd"
containerID="cri-o://406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217" gracePeriod=30 Oct 03 15:49:59 crc kubenswrapper[5081]: I1003 15:49:59.862450 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7cc85979bf-f5kj9" Oct 03 15:49:59 crc kubenswrapper[5081]: I1003 15:49:59.862929 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7cc85979bf-f5kj9" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.383608 5081 generic.go:334] "Generic (PLEG): container finished" podID="742235ed-5eea-4b22-be74-739791069828" containerID="6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45" exitCode=143 Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.383659 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerDied","Data":"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45"} Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.415549 5081 scope.go:117] "RemoveContainer" containerID="1fe8dead4ffd55edc421a9224345f21f357585af666bad8201bd619bfd771d49" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.562343 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.562639 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-log" containerID="cri-o://3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572" gracePeriod=30 Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.563153 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-httpd" containerID="cri-o://6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492" gracePeriod=30 Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.564155 5081 scope.go:117] "RemoveContainer" containerID="bff46afe4737f638169c5f91acfe8c9e72fdce201b79540cce253c33f242566e" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.645612 5081 scope.go:117] "RemoveContainer" containerID="85943a8df529b81061f58996a21c0eeb8ce082102c43f331de24240cbc8080c0" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.648063 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.648114 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.938261 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:50:00 crc kubenswrapper[5081]: I1003 15:50:00.944944 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.066160 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.066372 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.066498 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nprnt\" (UniqueName: \"kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.066759 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.066986 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.067123 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.067687 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.067889 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcktv\" (UniqueName: \"kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.067969 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.068037 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.068116 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc\") pod \"2c5f576d-1c88-44ef-929c-44740b49afbd\" (UID: \"2c5f576d-1c88-44ef-929c-44740b49afbd\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.068199 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.068360 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom\") pod \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\" (UID: \"66c4d93c-7cd2-4f90-b582-0691aa49e1d2\") " Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.069004 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.073598 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt" (OuterVolumeSpecName: "kube-api-access-nprnt") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "kube-api-access-nprnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.077294 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts" (OuterVolumeSpecName: "scripts") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.078750 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv" (OuterVolumeSpecName: "kube-api-access-mcktv") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "kube-api-access-mcktv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.141620 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.148947 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config" (OuterVolumeSpecName: "config") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.154705 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.162760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.163435 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.165245 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170346 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcktv\" (UniqueName: \"kubernetes.io/projected/2c5f576d-1c88-44ef-929c-44740b49afbd-kube-api-access-mcktv\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170372 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170386 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170399 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nprnt\" (UniqueName: \"kubernetes.io/projected/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-kube-api-access-nprnt\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170410 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170437 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170449 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.170460 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.172110 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.172503 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c5f576d-1c88-44ef-929c-44740b49afbd" (UID: "2c5f576d-1c88-44ef-929c-44740b49afbd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.237342 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data" (OuterVolumeSpecName: "config-data") pod "66c4d93c-7cd2-4f90-b582-0691aa49e1d2" (UID: "66c4d93c-7cd2-4f90-b582-0691aa49e1d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.271264 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66c4d93c-7cd2-4f90-b582-0691aa49e1d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.271307 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.271321 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c5f576d-1c88-44ef-929c-44740b49afbd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.393758 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerStarted","Data":"a512b355c92dcf95a7b4f2f75ef3ab1eb6e388ba88628692259d42a29f464404"} Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.395719 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" event={"ID":"2c5f576d-1c88-44ef-929c-44740b49afbd","Type":"ContainerDied","Data":"4544bf55b36e0528863e47b61c3ee98981e39d83a74c43e7bb0df46cba7739f2"} Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.395769 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7668c8f757-vl5hz" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.395776 5081 scope.go:117] "RemoveContainer" containerID="ec17f8ead412a8a955287bcb856d88f24ab90b24f1df88e67d002ae01652dd13" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.398217 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"66c4d93c-7cd2-4f90-b582-0691aa49e1d2","Type":"ContainerDied","Data":"b1b4031a8788c812e1889760f0fb56a2244abe08e61555e62cceab2d218ae039"} Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.398247 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.402667 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerDied","Data":"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572"} Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.402707 5081 generic.go:334] "Generic (PLEG): container finished" podID="210b770a-8b67-424a-acea-05c551a48f36" containerID="3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572" exitCode=143 Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.452199 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.459905 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.473274 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:50:01 crc kubenswrapper[5081]: E1003 15:50:01.473824 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="probe" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.473866 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="probe" Oct 03 15:50:01 crc kubenswrapper[5081]: E1003 15:50:01.473882 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="cinder-scheduler" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.473891 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="cinder-scheduler" Oct 03 15:50:01 crc kubenswrapper[5081]: E1003 15:50:01.473920 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="dnsmasq-dns" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.473928 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="dnsmasq-dns" Oct 03 15:50:01 crc kubenswrapper[5081]: E1003 15:50:01.473951 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="init" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.473960 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="init" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.474213 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="probe" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 
15:50:01.474240 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" containerName="cinder-scheduler" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.474251 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" containerName="dnsmasq-dns" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.476323 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.477625 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.484581 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.529666 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.538324 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7668c8f757-vl5hz"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.599211 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679413 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679510 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679593 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679617 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679640 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.679661 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fvj8\" (UniqueName: 
\"kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.691637 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.691866 5081 scope.go:117] "RemoveContainer" containerID="f2926773a6b74e6e44743ee127345174154bef47185f4e9eb7217e6fe6870005" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.691890 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5bdd85c564-j4t4w" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-api" containerID="cri-o://33b201c7a663b865369e6350c02ba3702348259b089686f798961ebe1336e7e1" gracePeriod=30 Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.692411 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5bdd85c564-j4t4w" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-httpd" containerID="cri-o://47ef3c60108408db9a1b4be2ea0e08788da413b7ecb03dd036cc7204812d8491" gracePeriod=30 Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.773838 5081 scope.go:117] "RemoveContainer" containerID="bd24bd28a5156095e6ca976586e332f4440bdc22de92cf9c6c89cf12ad2a08ad" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785391 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785442 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785472 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785488 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fvj8\" (UniqueName: \"kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785601 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.785637 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts\") pod 
\"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.787349 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.789673 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.802106 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.805305 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.805412 5081 scope.go:117] "RemoveContainer" containerID="9729a8a571631ca0e31dca44ceef1da98536b4e057024fac6b4c395ec68fb2ae" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.807140 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fvj8\" (UniqueName: \"kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.838659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.864396 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " pod="openstack/cinder-scheduler-0" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.899350 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c5f576d-1c88-44ef-929c-44740b49afbd" path="/var/lib/kubelet/pods/2c5f576d-1c88-44ef-929c-44740b49afbd/volumes" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.900010 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66c4d93c-7cd2-4f90-b582-0691aa49e1d2" path="/var/lib/kubelet/pods/66c4d93c-7cd2-4f90-b582-0691aa49e1d2/volumes" Oct 03 15:50:01 crc kubenswrapper[5081]: I1003 15:50:01.954042 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:50:02 crc kubenswrapper[5081]: I1003 15:50:02.405682 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:50:02 crc kubenswrapper[5081]: W1003 15:50:02.417690 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbc5b56e_83ed_460e_a57d_2c51df24c5dc.slice/crio-7c68f4c51d68116b886b83a476595c798152e15a298c17e3ecc5a524978ed239 WatchSource:0}: Error finding container 7c68f4c51d68116b886b83a476595c798152e15a298c17e3ecc5a524978ed239: Status 404 returned error can't find the container with id 7c68f4c51d68116b886b83a476595c798152e15a298c17e3ecc5a524978ed239 Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.325907 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.417886 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.418004 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.478043 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerStarted","Data":"7ad6062447176d359892df549a0f432d03509731f48fa17e1a53bd11f1b7042c"} Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.478585 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerStarted","Data":"7c68f4c51d68116b886b83a476595c798152e15a298c17e3ecc5a524978ed239"} Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.478747 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.493973 5081 generic.go:334] "Generic (PLEG): container finished" podID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerID="47ef3c60108408db9a1b4be2ea0e08788da413b7ecb03dd036cc7204812d8491" exitCode=0 Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.494053 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerDied","Data":"47ef3c60108408db9a1b4be2ea0e08788da413b7ecb03dd036cc7204812d8491"} Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.502707 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data" (OuterVolumeSpecName: "config-data") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.510101 5081 generic.go:334] "Generic (PLEG): container finished" podID="742235ed-5eea-4b22-be74-739791069828" containerID="406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217" exitCode=0 Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.510147 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerDied","Data":"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217"} Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.510174 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"742235ed-5eea-4b22-be74-739791069828","Type":"ContainerDied","Data":"ee39f5e563af73fa9df44b5893cc4dec6b00645f4dc50c703005ef8b4509b903"} Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.510191 5081 scope.go:117] "RemoveContainer" containerID="406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.510310 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520532 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520609 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520630 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrlvz\" (UniqueName: \"kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520715 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520742 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.520802 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"742235ed-5eea-4b22-be74-739791069828\" (UID: \"742235ed-5eea-4b22-be74-739791069828\") " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.521331 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.521348 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.522385 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.524321 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.525839 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz" (OuterVolumeSpecName: "kube-api-access-qrlvz") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "kube-api-access-qrlvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.529686 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts" (OuterVolumeSpecName: "scripts") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.535054 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs" (OuterVolumeSpecName: "logs") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.586887 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "742235ed-5eea-4b22-be74-739791069828" (UID: "742235ed-5eea-4b22-be74-739791069828"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.591795 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.622746 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.622980 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742235ed-5eea-4b22-be74-739791069828-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.623042 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrlvz\" (UniqueName: \"kubernetes.io/projected/742235ed-5eea-4b22-be74-739791069828-kube-api-access-qrlvz\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.623098 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.623153 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/742235ed-5eea-4b22-be74-739791069828-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.623237 5081 
reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.651913 5081 scope.go:117] "RemoveContainer" containerID="6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.657860 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.681965 5081 scope.go:117] "RemoveContainer" containerID="406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217" Oct 03 15:50:03 crc kubenswrapper[5081]: E1003 15:50:03.682477 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217\": container with ID starting with 406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217 not found: ID does not exist" containerID="406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.682516 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217"} err="failed to get container status \"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217\": rpc error: code = NotFound desc = could not find container \"406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217\": container with ID starting with 406ff7add10764ec04233f5918ce1860c438e4c458003e865df3ed8239258217 not found: ID does not exist" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.682542 5081 scope.go:117] "RemoveContainer" containerID="6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45" Oct 03 15:50:03 crc kubenswrapper[5081]: E1003 15:50:03.683057 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45\": container with ID starting with 6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45 not found: ID does not exist" containerID="6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.683088 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45"} err="failed to get container status \"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45\": rpc error: code = NotFound desc = could not find container \"6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45\": container with ID starting with 6a2f5bdedb6fd57539ee42d8745a0c90ef6bd1fa1b1533b58db67aaff9167f45 not found: ID does not exist" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.724669 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.920652 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.932423 5081 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.955686 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:50:03 crc kubenswrapper[5081]: E1003 15:50:03.956396 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-log" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.956416 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-log" Oct 03 15:50:03 crc kubenswrapper[5081]: E1003 15:50:03.956463 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-httpd" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.956470 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-httpd" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.956687 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-httpd" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.956709 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="742235ed-5eea-4b22-be74-739791069828" containerName="glance-log" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.957828 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.961453 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.961726 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 15:50:03 crc kubenswrapper[5081]: I1003 15:50:03.966877 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135745 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135836 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135882 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135905 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135951 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.135991 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.136016 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.136068 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gcrq\" (UniqueName: \"kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238252 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238294 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238339 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238373 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238390 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238858 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gcrq\" (UniqueName: \"kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238938 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.238987 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.239320 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.239676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.240171 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.243753 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.258980 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.259391 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.259442 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gcrq\" (UniqueName: \"kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.272221 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.312807 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.480408 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.532228 5081 generic.go:334] "Generic (PLEG): container finished" podID="210b770a-8b67-424a-acea-05c551a48f36" containerID="6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492" exitCode=0 Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.532307 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerDied","Data":"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492"} Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.532340 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"210b770a-8b67-424a-acea-05c551a48f36","Type":"ContainerDied","Data":"3c47a77e1071300704cc775cc253f89fe84e49e4abfeef3132d5aa6d3ce22b5c"} Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.532360 5081 scope.go:117] "RemoveContainer" containerID="6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.532532 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.549060 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"79acb98c-21e6-4bad-ad5b-4ebc855e6378","Type":"ContainerStarted","Data":"c817d8193802204a47e564f00d89b7300147b2f6e4a65ec259ecd73cefe10317"} Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.569813 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.997785896 podStartE2EDuration="22.569797549s" podCreationTimestamp="2025-10-03 15:49:42 +0000 UTC" firstStartedPulling="2025-10-03 15:49:44.082759459 +0000 UTC m=+1303.048316072" lastFinishedPulling="2025-10-03 15:50:03.654771112 +0000 UTC m=+1322.620327725" observedRunningTime="2025-10-03 15:50:04.567826622 +0000 UTC m=+1323.533383235" watchObservedRunningTime="2025-10-03 15:50:04.569797549 +0000 UTC m=+1323.535354162" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.587697 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.588970 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.642764 5081 scope.go:117] "RemoveContainer" containerID="3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.653674 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.653738 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655397 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655436 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655582 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkfzk\" (UniqueName: \"kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655616 5081 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655646 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.655753 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle\") pod \"210b770a-8b67-424a-acea-05c551a48f36\" (UID: \"210b770a-8b67-424a-acea-05c551a48f36\") " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.656125 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs" (OuterVolumeSpecName: "logs") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.656241 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.660257 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.660281 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/210b770a-8b67-424a-acea-05c551a48f36-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.677797 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.677864 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk" (OuterVolumeSpecName: "kube-api-access-lkfzk") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "kube-api-access-lkfzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.678727 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts" (OuterVolumeSpecName: "scripts") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.692880 5081 scope.go:117] "RemoveContainer" containerID="6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492" Oct 03 15:50:04 crc kubenswrapper[5081]: E1003 15:50:04.693370 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492\": container with ID starting with 6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492 not found: ID does not exist" containerID="6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.693416 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492"} err="failed to get container status \"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492\": rpc error: code = NotFound desc = could not find container \"6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492\": container with ID starting with 6fa55ab45ef25ad3e5bc9966d363ee8da116173736daf9523069963f87a20492 not found: ID does not exist" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.693442 5081 scope.go:117] "RemoveContainer" containerID="3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572" Oct 03 15:50:04 crc kubenswrapper[5081]: E1003 15:50:04.694851 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572\": container with ID starting with 3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572 not found: ID does not exist" containerID="3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.694892 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572"} err="failed to get container status \"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572\": rpc error: code = NotFound desc = could not find container \"3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572\": container with ID starting with 3c98894f1870c82fa60dd5e0743f83c69cb5dd9147872d55e7efcae5e0dec572 not found: ID does not exist" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.713356 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.725196 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.741110 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data" (OuterVolumeSpecName: "config-data") pod "210b770a-8b67-424a-acea-05c551a48f36" (UID: "210b770a-8b67-424a-acea-05c551a48f36"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762318 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkfzk\" (UniqueName: \"kubernetes.io/projected/210b770a-8b67-424a-acea-05c551a48f36-kube-api-access-lkfzk\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762347 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762357 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762366 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762375 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/210b770a-8b67-424a-acea-05c551a48f36-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.762407 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.791899 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.864046 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.895394 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.927599 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.945669 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:04 crc kubenswrapper[5081]: E1003 15:50:04.946156 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-log" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.946177 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-log" Oct 03 15:50:04 crc kubenswrapper[5081]: E1003 15:50:04.946232 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-httpd" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.946241 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-httpd" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.946434 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-httpd" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.946471 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="210b770a-8b67-424a-acea-05c551a48f36" containerName="glance-log" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.947552 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.952115 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.953920 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 15:50:04 crc kubenswrapper[5081]: I1003 15:50:04.954110 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.074834 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075184 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075209 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075228 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075256 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075386 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9csvt\" 
(UniqueName: \"kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075479 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.075523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.088153 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-z6xfs"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.089842 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.108998 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-z6xfs"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177226 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177295 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmvxd\" (UniqueName: \"kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd\") pod \"nova-api-db-create-z6xfs\" (UID: \"6e1d7200-8b2d-4cf8-be4a-7924fc195005\") " pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177331 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177360 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177390 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177427 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177471 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9csvt\" (UniqueName: \"kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177513 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.177550 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.178100 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.179630 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.185004 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.185274 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.188488 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.190199 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.204241 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9csvt\" (UniqueName: \"kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.210375 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.226360 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jw7x4"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.227638 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.250011 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jw7x4"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.256708 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.278865 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmvxd\" (UniqueName: \"kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd\") pod \"nova-api-db-create-z6xfs\" (UID: \"6e1d7200-8b2d-4cf8-be4a-7924fc195005\") " pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.295816 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.312312 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmvxd\" (UniqueName: \"kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd\") pod \"nova-api-db-create-z6xfs\" (UID: \"6e1d7200-8b2d-4cf8-be4a-7924fc195005\") " pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.396017 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5v6x\" (UniqueName: \"kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x\") pod \"nova-cell0-db-create-jw7x4\" (UID: \"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1\") " pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.415001 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.474532 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.498790 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5v6x\" (UniqueName: \"kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x\") pod \"nova-cell0-db-create-jw7x4\" (UID: \"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1\") " pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.544998 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-s54wv"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.546274 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.556032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5v6x\" (UniqueName: \"kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x\") pod \"nova-cell0-db-create-jw7x4\" (UID: \"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1\") " pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.577400 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.602486 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-s54wv"] Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.703369 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerStarted","Data":"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"} Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.750036 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdrbr\" (UniqueName: \"kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr\") pod \"nova-cell1-db-create-s54wv\" (UID: \"fa9930e6-e39c-49d6-a13f-05abb8dce794\") " pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.758904 5081 generic.go:334] "Generic (PLEG): container finished" podID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerID="33b201c7a663b865369e6350c02ba3702348259b089686f798961ebe1336e7e1" exitCode=0 Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.758988 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerDied","Data":"33b201c7a663b865369e6350c02ba3702348259b089686f798961ebe1336e7e1"} Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.804655 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerStarted","Data":"34b532534696cfce7fa57360a28da1b4d03320d22387a0dc84f4ff85a37755ca"} Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.852741 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdrbr\" (UniqueName: 
\"kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr\") pod \"nova-cell1-db-create-s54wv\" (UID: \"fa9930e6-e39c-49d6-a13f-05abb8dce794\") " pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.863111 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210b770a-8b67-424a-acea-05c551a48f36" path="/var/lib/kubelet/pods/210b770a-8b67-424a-acea-05c551a48f36/volumes" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.864175 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="742235ed-5eea-4b22-be74-739791069828" path="/var/lib/kubelet/pods/742235ed-5eea-4b22-be74-739791069828/volumes" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.864942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerStarted","Data":"b6fd92f8baa9b25fd91643a7fa2bd52ae9a8dc5be12a4823bad3dc1941b76522"} Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.887240 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.887219486 podStartE2EDuration="4.887219486s" podCreationTimestamp="2025-10-03 15:50:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:05.868228146 +0000 UTC m=+1324.833784759" watchObservedRunningTime="2025-10-03 15:50:05.887219486 +0000 UTC m=+1324.852776099" Oct 03 15:50:05 crc kubenswrapper[5081]: I1003 15:50:05.901383 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdrbr\" (UniqueName: \"kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr\") pod \"nova-cell1-db-create-s54wv\" (UID: \"fa9930e6-e39c-49d6-a13f-05abb8dce794\") " pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.002227 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.229291 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.248353 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.259597 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-z6xfs"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.379290 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbqrd\" (UniqueName: \"kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd\") pod \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.379749 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config\") pod \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.379816 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config\") pod \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.379837 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs\") pod \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.379883 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle\") pod \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\" (UID: \"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2\") " Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.389906 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd" (OuterVolumeSpecName: "kube-api-access-pbqrd") pod "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" (UID: "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2"). InnerVolumeSpecName "kube-api-access-pbqrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.391768 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" (UID: "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.481547 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jw7x4"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.482917 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbqrd\" (UniqueName: \"kubernetes.io/projected/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-kube-api-access-pbqrd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.482954 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.530758 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config" (OuterVolumeSpecName: "config") pod "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" (UID: "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.555684 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" (UID: "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.583793 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.583842 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.589698 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" (UID: "25fcab82-3148-4b1a-b54e-f8fe5b3c51c2"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.660672 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-s54wv"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.685003 5081 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.856828 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z6xfs" event={"ID":"6e1d7200-8b2d-4cf8-be4a-7924fc195005","Type":"ContainerStarted","Data":"ade8bfa212286876beaf7d9262ef15897fe9c6acbef01103df41d63c12e9fec3"} Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.860346 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jw7x4" event={"ID":"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1","Type":"ContainerStarted","Data":"945e44fbf8cf2043a6531a72c06de752726211ac6f1f45d8fcf8f77d4e5b5cb3"} Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.863871 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5bdd85c564-j4t4w" event={"ID":"25fcab82-3148-4b1a-b54e-f8fe5b3c51c2","Type":"ContainerDied","Data":"906ccbde6a35ad58c05707e9c452efd7be1d99e59d8e11ad5a949f9c5ccf4b48"} Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.863939 5081 scope.go:117] "RemoveContainer" containerID="47ef3c60108408db9a1b4be2ea0e08788da413b7ecb03dd036cc7204812d8491" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.863945 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5bdd85c564-j4t4w" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.868921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-s54wv" event={"ID":"fa9930e6-e39c-49d6-a13f-05abb8dce794","Type":"ContainerStarted","Data":"a7e8abd06350e41514d5166b9bf215d1d61583452681732025e88f341ef7bd57"} Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.875073 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerStarted","Data":"3ed5e97bc2df7e3804d348484f45d7f6b088fdb7d7da8f6ec6a969614d4b835b"} Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.905136 5081 scope.go:117] "RemoveContainer" containerID="33b201c7a663b865369e6350c02ba3702348259b089686f798961ebe1336e7e1" Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.909854 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.917783 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5bdd85c564-j4t4w"] Oct 03 15:50:06 crc kubenswrapper[5081]: I1003 15:50:06.954416 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.200079 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.844256 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" path="/var/lib/kubelet/pods/25fcab82-3148-4b1a-b54e-f8fe5b3c51c2/volumes" Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 
15:50:07.886622 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerStarted","Data":"20c238aeaa3c71e4c9d06c0d2978a9a624c469b91b5eb72c44d08c1bce7fd2a9"} Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.889395 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z6xfs" event={"ID":"6e1d7200-8b2d-4cf8-be4a-7924fc195005","Type":"ContainerStarted","Data":"18db5cd9adfb101e2f6a73d479aac7968e4fd25e0bc69fc94066119176608f62"} Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.896016 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jw7x4" event={"ID":"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1","Type":"ContainerStarted","Data":"68e340c1501e9dc43aadc37d497b3228a7433d115bf05a6a47d62aedac45c029"} Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.911926 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-z6xfs" podStartSLOduration=2.911895276 podStartE2EDuration="2.911895276s" podCreationTimestamp="2025-10-03 15:50:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:07.906409057 +0000 UTC m=+1326.871965680" watchObservedRunningTime="2025-10-03 15:50:07.911895276 +0000 UTC m=+1326.877451909" Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.912494 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-s54wv" event={"ID":"fa9930e6-e39c-49d6-a13f-05abb8dce794","Type":"ContainerStarted","Data":"86505af40b69e0e0b9847b07e4658ae402a0c4c37caa4072e1ee5ae9768dc0f6"} Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.917256 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerStarted","Data":"8d9622a52a15f0f59848d03a12a790b4b65eb968515da8ea26ad4197241e283b"} Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.938937 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-jw7x4" podStartSLOduration=2.938912348 podStartE2EDuration="2.938912348s" podCreationTimestamp="2025-10-03 15:50:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:07.930916577 +0000 UTC m=+1326.896473190" watchObservedRunningTime="2025-10-03 15:50:07.938912348 +0000 UTC m=+1326.904468961" Oct 03 15:50:07 crc kubenswrapper[5081]: I1003 15:50:07.955772 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-s54wv" podStartSLOduration=2.955742776 podStartE2EDuration="2.955742776s" podCreationTimestamp="2025-10-03 15:50:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:07.946601611 +0000 UTC m=+1326.912158234" watchObservedRunningTime="2025-10-03 15:50:07.955742776 +0000 UTC m=+1326.921299379" Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.928882 5081 generic.go:334] "Generic (PLEG): container finished" podID="e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" containerID="68e340c1501e9dc43aadc37d497b3228a7433d115bf05a6a47d62aedac45c029" exitCode=0 Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.928968 5081 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jw7x4" event={"ID":"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1","Type":"ContainerDied","Data":"68e340c1501e9dc43aadc37d497b3228a7433d115bf05a6a47d62aedac45c029"}
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.931476 5081 generic.go:334] "Generic (PLEG): container finished" podID="fa9930e6-e39c-49d6-a13f-05abb8dce794" containerID="86505af40b69e0e0b9847b07e4658ae402a0c4c37caa4072e1ee5ae9768dc0f6" exitCode=0
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.931506 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-s54wv" event={"ID":"fa9930e6-e39c-49d6-a13f-05abb8dce794","Type":"ContainerDied","Data":"86505af40b69e0e0b9847b07e4658ae402a0c4c37caa4072e1ee5ae9768dc0f6"}
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.934366 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerStarted","Data":"7893281dd7f6e155dae82597172d11e93702c9a53318c8564135c0043e49216e"}
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.937937 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerStarted","Data":"71bc762a04da349305dfb7aba1fc1915351db7ac6e3d40db700ab69ee2492adf"}
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.941205 5081 generic.go:334] "Generic (PLEG): container finished" podID="6e1d7200-8b2d-4cf8-be4a-7924fc195005" containerID="18db5cd9adfb101e2f6a73d479aac7968e4fd25e0bc69fc94066119176608f62" exitCode=0
Oct 03 15:50:08 crc kubenswrapper[5081]: I1003 15:50:08.941278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z6xfs" event={"ID":"6e1d7200-8b2d-4cf8-be4a-7924fc195005","Type":"ContainerDied","Data":"18db5cd9adfb101e2f6a73d479aac7968e4fd25e0bc69fc94066119176608f62"}
Oct 03 15:50:09 crc kubenswrapper[5081]: I1003 15:50:09.006678 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.006656818 podStartE2EDuration="6.006656818s" podCreationTimestamp="2025-10-03 15:50:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:08.999580033 +0000 UTC m=+1327.965136656" watchObservedRunningTime="2025-10-03 15:50:09.006656818 +0000 UTC m=+1327.972213451"
Oct 03 15:50:09 crc kubenswrapper[5081]: I1003 15:50:09.031109 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.031081895 podStartE2EDuration="5.031081895s" podCreationTimestamp="2025-10-03 15:50:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:50:09.027519232 +0000 UTC m=+1327.993075895" watchObservedRunningTime="2025-10-03 15:50:09.031081895 +0000 UTC m=+1327.996638548"
Oct 03 15:50:09 crc kubenswrapper[5081]: I1003 15:50:09.954786 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerStarted","Data":"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"}
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.440170 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jw7x4"
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.563387 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5v6x\" (UniqueName: \"kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x\") pod \"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1\" (UID: \"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1\") "
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.569308 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x" (OuterVolumeSpecName: "kube-api-access-v5v6x") pod "e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" (UID: "e5d75d32-d4c9-4f1c-b62d-f44a65f973a1"). InnerVolumeSpecName "kube-api-access-v5v6x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.571760 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-s54wv"
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.628423 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-z6xfs"
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.665887 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdrbr\" (UniqueName: \"kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr\") pod \"fa9930e6-e39c-49d6-a13f-05abb8dce794\" (UID: \"fa9930e6-e39c-49d6-a13f-05abb8dce794\") "
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.666437 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5v6x\" (UniqueName: \"kubernetes.io/projected/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1-kube-api-access-v5v6x\") on node \"crc\" DevicePath \"\""
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.669460 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr" (OuterVolumeSpecName: "kube-api-access-bdrbr") pod "fa9930e6-e39c-49d6-a13f-05abb8dce794" (UID: "fa9930e6-e39c-49d6-a13f-05abb8dce794"). InnerVolumeSpecName "kube-api-access-bdrbr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.770425 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmvxd\" (UniqueName: \"kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd\") pod \"6e1d7200-8b2d-4cf8-be4a-7924fc195005\" (UID: \"6e1d7200-8b2d-4cf8-be4a-7924fc195005\") "
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.771117 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdrbr\" (UniqueName: \"kubernetes.io/projected/fa9930e6-e39c-49d6-a13f-05abb8dce794-kube-api-access-bdrbr\") on node \"crc\" DevicePath \"\""
Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.774802 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd" (OuterVolumeSpecName: "kube-api-access-hmvxd") pod "6e1d7200-8b2d-4cf8-be4a-7924fc195005" (UID: "6e1d7200-8b2d-4cf8-be4a-7924fc195005"). InnerVolumeSpecName "kube-api-access-hmvxd". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.872792 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmvxd\" (UniqueName: \"kubernetes.io/projected/6e1d7200-8b2d-4cf8-be4a-7924fc195005-kube-api-access-hmvxd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.963583 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jw7x4" event={"ID":"e5d75d32-d4c9-4f1c-b62d-f44a65f973a1","Type":"ContainerDied","Data":"945e44fbf8cf2043a6531a72c06de752726211ac6f1f45d8fcf8f77d4e5b5cb3"} Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.963621 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="945e44fbf8cf2043a6531a72c06de752726211ac6f1f45d8fcf8f77d4e5b5cb3" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.963675 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jw7x4" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.969691 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-s54wv" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.969699 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-s54wv" event={"ID":"fa9930e6-e39c-49d6-a13f-05abb8dce794","Type":"ContainerDied","Data":"a7e8abd06350e41514d5166b9bf215d1d61583452681732025e88f341ef7bd57"} Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.969743 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7e8abd06350e41514d5166b9bf215d1d61583452681732025e88f341ef7bd57" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.970777 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-z6xfs" event={"ID":"6e1d7200-8b2d-4cf8-be4a-7924fc195005","Type":"ContainerDied","Data":"ade8bfa212286876beaf7d9262ef15897fe9c6acbef01103df41d63c12e9fec3"} Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.970801 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ade8bfa212286876beaf7d9262ef15897fe9c6acbef01103df41d63c12e9fec3" Oct 03 15:50:10 crc kubenswrapper[5081]: I1003 15:50:10.970838 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-z6xfs" Oct 03 15:50:14 crc kubenswrapper[5081]: I1003 15:50:14.590068 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 15:50:14 crc kubenswrapper[5081]: I1003 15:50:14.590813 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 15:50:14 crc kubenswrapper[5081]: I1003 15:50:14.633724 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 15:50:14 crc kubenswrapper[5081]: I1003 15:50:14.633813 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.009247 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.009656 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.298770 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.298839 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.337173 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.353957 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:15 crc kubenswrapper[5081]: I1003 15:50:15.468125 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 15:50:16 crc kubenswrapper[5081]: I1003 15:50:16.020356 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:16 crc kubenswrapper[5081]: I1003 15:50:16.020406 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:16 crc kubenswrapper[5081]: I1003 15:50:16.994407 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 15:50:17 crc kubenswrapper[5081]: I1003 15:50:17.014805 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 15:50:18 crc kubenswrapper[5081]: I1003 15:50:18.039785 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerStarted","Data":"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"} Oct 03 15:50:18 crc kubenswrapper[5081]: I1003 15:50:18.212801 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 15:50:18 crc kubenswrapper[5081]: I1003 15:50:18.212927 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 15:50:18 crc kubenswrapper[5081]: I1003 15:50:18.351465 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-internal-api-0" Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.064983 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerStarted","Data":"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520"} Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.067043 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.065200 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="proxy-httpd" containerID="cri-o://9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520" gracePeriod=30 Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.065235 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-notification-agent" containerID="cri-o://81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93" gracePeriod=30 Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.065258 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="sg-core" containerID="cri-o://c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9" gracePeriod=30 Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.065136 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-central-agent" containerID="cri-o://f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d" gracePeriod=30 Oct 03 15:50:20 crc kubenswrapper[5081]: I1003 15:50:20.090678 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=10.350048082 podStartE2EDuration="28.090662067s" podCreationTimestamp="2025-10-03 15:49:52 +0000 UTC" firstStartedPulling="2025-10-03 15:50:01.175242433 +0000 UTC m=+1320.140799046" lastFinishedPulling="2025-10-03 15:50:18.915856418 +0000 UTC m=+1337.881413031" observedRunningTime="2025-10-03 15:50:20.086594619 +0000 UTC m=+1339.052151282" watchObservedRunningTime="2025-10-03 15:50:20.090662067 +0000 UTC m=+1339.056218680" Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.078683 5081 generic.go:334] "Generic (PLEG): container finished" podID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerID="9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520" exitCode=0 Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.080664 5081 generic.go:334] "Generic (PLEG): container finished" podID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerID="c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9" exitCode=2 Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.080779 5081 generic.go:334] "Generic (PLEG): container finished" podID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerID="f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d" exitCode=0 Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.078789 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerDied","Data":"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520"} Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.081001 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerDied","Data":"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"} Oct 03 15:50:21 crc kubenswrapper[5081]: I1003 15:50:21.081098 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerDied","Data":"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"} Oct 03 15:50:22 crc kubenswrapper[5081]: E1003 15:50:22.409892 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda52ba0c3_df1f_4004_926a_4f7ac6ee1580.slice/crio-81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda52ba0c3_df1f_4004_926a_4f7ac6ee1580.slice/crio-conmon-81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93.scope\": RecentStats: unable to find data in memory cache]" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.685097 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.823584 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdfcf\" (UniqueName: \"kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.823877 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824087 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824137 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824164 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824216 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824237 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd\") pod \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\" (UID: \"a52ba0c3-df1f-4004-926a-4f7ac6ee1580\") " Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824281 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824615 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.824755 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.829769 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts" (OuterVolumeSpecName: "scripts") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.842913 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf" (OuterVolumeSpecName: "kube-api-access-zdfcf") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "kube-api-access-zdfcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.853339 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.890844 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.926540 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.926578 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.926587 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.926595 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.926605 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdfcf\" (UniqueName: \"kubernetes.io/projected/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-kube-api-access-zdfcf\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:22 crc kubenswrapper[5081]: I1003 15:50:22.933395 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data" (OuterVolumeSpecName: "config-data") pod "a52ba0c3-df1f-4004-926a-4f7ac6ee1580" (UID: "a52ba0c3-df1f-4004-926a-4f7ac6ee1580"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.028469 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a52ba0c3-df1f-4004-926a-4f7ac6ee1580-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.098765 5081 generic.go:334] "Generic (PLEG): container finished" podID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerID="81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93" exitCode=0 Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.098814 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerDied","Data":"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"} Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.098847 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a52ba0c3-df1f-4004-926a-4f7ac6ee1580","Type":"ContainerDied","Data":"a512b355c92dcf95a7b4f2f75ef3ab1eb6e388ba88628692259d42a29f464404"} Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.098871 5081 scope.go:117] "RemoveContainer" containerID="9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.098885 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.118317 5081 scope.go:117] "RemoveContainer" containerID="c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.151295 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.163754 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.167212 5081 scope.go:117] "RemoveContainer" containerID="81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.176458 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177288 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="sg-core"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177322 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="sg-core"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177348 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-api"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177356 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-api"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177376 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="proxy-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177384 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="proxy-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177408 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177416 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177428 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177436 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177445 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9930e6-e39c-49d6-a13f-05abb8dce794" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177453 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9930e6-e39c-49d6-a13f-05abb8dce794" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177467 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e1d7200-8b2d-4cf8-be4a-7924fc195005" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177488 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e1d7200-8b2d-4cf8-be4a-7924fc195005" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177508 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-notification-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177515 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-notification-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.177529 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-central-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177537 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-central-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177801 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-notification-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177817 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="ceilometer-central-agent"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177838 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-api"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177855 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9930e6-e39c-49d6-a13f-05abb8dce794" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177869 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e1d7200-8b2d-4cf8-be4a-7924fc195005" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177882 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="sg-core"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177903 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="25fcab82-3148-4b1a-b54e-f8fe5b3c51c2" containerName="neutron-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177915 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" containerName="mariadb-database-create"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.177924 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" containerName="proxy-httpd"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.180262 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.182651 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.182843 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.192929 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.202764 5081 scope.go:117] "RemoveContainer" containerID="f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.229520 5081 scope.go:117] "RemoveContainer" containerID="9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.230217 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520\": container with ID starting with 9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520 not found: ID does not exist" containerID="9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.230261 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520"} err="failed to get container status \"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520\": rpc error: code = NotFound desc = could not find container \"9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520\": container with ID starting with 9602a70833853504efa5e01bb998d2c96e7c9b07f5c2cbdb348e08991b034520 not found: ID does not exist"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.230287 5081 scope.go:117] "RemoveContainer" containerID="c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.230638 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9\": container with ID starting with c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9 not found: ID does not exist" containerID="c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.230694 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9"} err="failed to get container status \"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9\": rpc error: code = NotFound desc = could not find container \"c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9\": container with ID starting with c9f998e5a234e48b2b25e231777b9f41c5a4ca6b499377bed9e56ef08e0231c9 not found: ID does not exist"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.230728 5081 scope.go:117] "RemoveContainer" containerID="81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.231013 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93\": container with ID starting with 81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93 not found: ID does not exist" containerID="81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.231044 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93"} err="failed to get container status \"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93\": rpc error: code = NotFound desc = could not find container \"81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93\": container with ID starting with 81961fea35a365a7c78cea83005e5a584881cab6e5475e7337d76a04fd597d93 not found: ID does not exist"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.231068 5081 scope.go:117] "RemoveContainer" containerID="f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"
Oct 03 15:50:23 crc kubenswrapper[5081]: E1003 15:50:23.231350 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d\": container with ID starting with f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d not found: ID does not exist" containerID="f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.231386 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d"} err="failed to get container status \"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d\": rpc error: code = NotFound desc = could not find container \"f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d\": container with ID starting with f299a0a9d622fc7ca66e09cfabe2d05b51a37b28c6910cf0fe19fb40eefec99d not found: ID does not exist"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.333105 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.333191 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.333382 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp4n5\" (UniqueName: \"kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.333483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0"
\"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.333754 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.334429 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.337431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446279 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446347 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446392 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446434 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446459 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp4n5\" (UniqueName: \"kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446477 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446519 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.446867 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.452171 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.452476 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.456619 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.458342 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.458510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.463907 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp4n5\" (UniqueName: \"kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5\") pod \"ceilometer-0\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " pod="openstack/ceilometer-0" Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.500653 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.841381 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a52ba0c3-df1f-4004-926a-4f7ac6ee1580" path="/var/lib/kubelet/pods/a52ba0c3-df1f-4004-926a-4f7ac6ee1580/volumes"
Oct 03 15:50:23 crc kubenswrapper[5081]: I1003 15:50:23.971586 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:24 crc kubenswrapper[5081]: I1003 15:50:24.110178 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerStarted","Data":"a4245f0323283290e8b486b53eebc004e65e960cb1b1110909d5fbf9bfde9e59"}
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.121473 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerStarted","Data":"9fd7c4205a4a05cb8577517061e76119ae8bddb896bb64d87053d0df8027eab3"}
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.140526 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c1ef-account-create-k8xb5"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.141988 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.144318 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.153285 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c1ef-account-create-k8xb5"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.281204 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5zgl\" (UniqueName: \"kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl\") pod \"nova-api-c1ef-account-create-k8xb5\" (UID: \"8b0f63e5-fce3-4604-9212-2d951a310c81\") " pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.357151 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-49e1-account-create-9cvsx"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.358580 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.366734 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.393354 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-49e1-account-create-9cvsx"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.394422 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5zgl\" (UniqueName: \"kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl\") pod \"nova-api-c1ef-account-create-k8xb5\" (UID: \"8b0f63e5-fce3-4604-9212-2d951a310c81\") " pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.433893 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5zgl\" (UniqueName: \"kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl\") pod \"nova-api-c1ef-account-create-k8xb5\" (UID: \"8b0f63e5-fce3-4604-9212-2d951a310c81\") " pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.469222 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.496112 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99lds\" (UniqueName: \"kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds\") pod \"nova-cell0-49e1-account-create-9cvsx\" (UID: \"5aeecca0-1628-4103-ba8b-5dcf3bb564e3\") " pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.538991 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c347-account-create-qnb2t"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.540365 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.544219 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.549902 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c347-account-create-qnb2t"]
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.598095 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99lds\" (UniqueName: \"kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds\") pod \"nova-cell0-49e1-account-create-9cvsx\" (UID: \"5aeecca0-1628-4103-ba8b-5dcf3bb564e3\") " pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.619228 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99lds\" (UniqueName: \"kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds\") pod \"nova-cell0-49e1-account-create-9cvsx\" (UID: \"5aeecca0-1628-4103-ba8b-5dcf3bb564e3\") " pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.700458 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hwkv\" (UniqueName: \"kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv\") pod \"nova-cell1-c347-account-create-qnb2t\" (UID: \"3ab8a197-adc0-43a7-b416-8d4770b1b646\") " pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.770337 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.802837 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hwkv\" (UniqueName: \"kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv\") pod \"nova-cell1-c347-account-create-qnb2t\" (UID: \"3ab8a197-adc0-43a7-b416-8d4770b1b646\") " pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.822236 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hwkv\" (UniqueName: \"kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv\") pod \"nova-cell1-c347-account-create-qnb2t\" (UID: \"3ab8a197-adc0-43a7-b416-8d4770b1b646\") " pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.969669 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:25 crc kubenswrapper[5081]: I1003 15:50:25.976448 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c1ef-account-create-k8xb5"]
Oct 03 15:50:26 crc kubenswrapper[5081]: I1003 15:50:26.134248 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c1ef-account-create-k8xb5" event={"ID":"8b0f63e5-fce3-4604-9212-2d951a310c81","Type":"ContainerStarted","Data":"d6530da6c7c2ba3fa189b455c22dff697caeb17501d208dab4c9469da4d61204"}
Oct 03 15:50:26 crc kubenswrapper[5081]: I1003 15:50:26.140577 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerStarted","Data":"f103d52a2df7e925b259c9b34913f41cda32f9c3fb12f74369c19454c456afe9"}
Oct 03 15:50:26 crc kubenswrapper[5081]: I1003 15:50:26.248378 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-49e1-account-create-9cvsx"]
Oct 03 15:50:26 crc kubenswrapper[5081]: I1003 15:50:26.276857 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c347-account-create-qnb2t"]
Oct 03 15:50:26 crc kubenswrapper[5081]: W1003 15:50:26.326636 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5aeecca0_1628_4103_ba8b_5dcf3bb564e3.slice/crio-f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d WatchSource:0}: Error finding container f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d: Status 404 returned error can't find the container with id f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.150696 5081 generic.go:334] "Generic (PLEG): container finished" podID="3ab8a197-adc0-43a7-b416-8d4770b1b646" containerID="73a7229a6dda8683762ca148305b6ad3a681e613e0f9cdd6e8995d83174cb3b3" exitCode=0
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.151060 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c347-account-create-qnb2t" event={"ID":"3ab8a197-adc0-43a7-b416-8d4770b1b646","Type":"ContainerDied","Data":"73a7229a6dda8683762ca148305b6ad3a681e613e0f9cdd6e8995d83174cb3b3"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.151087 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c347-account-create-qnb2t" event={"ID":"3ab8a197-adc0-43a7-b416-8d4770b1b646","Type":"ContainerStarted","Data":"56b263339fb44ed5f67d13ca6c46a55f014455c957f0251b26f8ff641c6d0e08"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.154038 5081 generic.go:334] "Generic (PLEG): container finished" podID="5aeecca0-1628-4103-ba8b-5dcf3bb564e3" containerID="83d1bd011ea30374ecdb406909cb245479e6b28e66c4b2423fdfa0f4b32232e7" exitCode=0
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.154129 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-49e1-account-create-9cvsx" event={"ID":"5aeecca0-1628-4103-ba8b-5dcf3bb564e3","Type":"ContainerDied","Data":"83d1bd011ea30374ecdb406909cb245479e6b28e66c4b2423fdfa0f4b32232e7"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.154158 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-49e1-account-create-9cvsx" event={"ID":"5aeecca0-1628-4103-ba8b-5dcf3bb564e3","Type":"ContainerStarted","Data":"f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.157268 5081 generic.go:334] "Generic (PLEG): container finished" podID="8b0f63e5-fce3-4604-9212-2d951a310c81" containerID="8efa7cfbd3c0d58f4e81f012bf8245d9e47c0f18b0af8a5a13e7f5e6027ad8a3" exitCode=0
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.157403 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c1ef-account-create-k8xb5" event={"ID":"8b0f63e5-fce3-4604-9212-2d951a310c81","Type":"ContainerDied","Data":"8efa7cfbd3c0d58f4e81f012bf8245d9e47c0f18b0af8a5a13e7f5e6027ad8a3"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.160717 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerStarted","Data":"d21cdb79c55bc0602af3223469dfd019f84274da8806adde3ed8a3237d25a038"}
Oct 03 15:50:27 crc kubenswrapper[5081]: I1003 15:50:27.379632 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.172488 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerStarted","Data":"85a574f5c3f4d8a324158bca5c0cfc64a8553cff287eed562bece986ff5da2c5"}
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.172834 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-central-agent" containerID="cri-o://9fd7c4205a4a05cb8577517061e76119ae8bddb896bb64d87053d0df8027eab3" gracePeriod=30
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.172836 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="proxy-httpd" containerID="cri-o://85a574f5c3f4d8a324158bca5c0cfc64a8553cff287eed562bece986ff5da2c5" gracePeriod=30
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.172838 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="sg-core" containerID="cri-o://d21cdb79c55bc0602af3223469dfd019f84274da8806adde3ed8a3237d25a038" gracePeriod=30
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.172855 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-notification-agent" containerID="cri-o://f103d52a2df7e925b259c9b34913f41cda32f9c3fb12f74369c19454c456afe9" gracePeriod=30
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.194628 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.51523839 podStartE2EDuration="5.194605961s" podCreationTimestamp="2025-10-03 15:50:23 +0000 UTC" firstStartedPulling="2025-10-03 15:50:23.981121138 +0000 UTC m=+1342.946677751" lastFinishedPulling="2025-10-03 15:50:27.660488699 +0000 UTC m=+1346.626045322" observedRunningTime="2025-10-03 15:50:28.190655396 +0000 UTC m=+1347.156212019" watchObservedRunningTime="2025-10-03 15:50:28.194605961 +0000 UTC m=+1347.160162574"
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.555383 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.666079 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c347-account-create-qnb2t"
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.667655 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.677597 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99lds\" (UniqueName: \"kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds\") pod \"5aeecca0-1628-4103-ba8b-5dcf3bb564e3\" (UID: \"5aeecca0-1628-4103-ba8b-5dcf3bb564e3\") "
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.692500 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds" (OuterVolumeSpecName: "kube-api-access-99lds") pod "5aeecca0-1628-4103-ba8b-5dcf3bb564e3" (UID: "5aeecca0-1628-4103-ba8b-5dcf3bb564e3"). InnerVolumeSpecName "kube-api-access-99lds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.779500 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hwkv\" (UniqueName: \"kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv\") pod \"3ab8a197-adc0-43a7-b416-8d4770b1b646\" (UID: \"3ab8a197-adc0-43a7-b416-8d4770b1b646\") "
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.779614 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5zgl\" (UniqueName: \"kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl\") pod \"8b0f63e5-fce3-4604-9212-2d951a310c81\" (UID: \"8b0f63e5-fce3-4604-9212-2d951a310c81\") "
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.780015 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99lds\" (UniqueName: \"kubernetes.io/projected/5aeecca0-1628-4103-ba8b-5dcf3bb564e3-kube-api-access-99lds\") on node \"crc\" DevicePath \"\""
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.784659 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv" (OuterVolumeSpecName: "kube-api-access-5hwkv") pod "3ab8a197-adc0-43a7-b416-8d4770b1b646" (UID: "3ab8a197-adc0-43a7-b416-8d4770b1b646"). InnerVolumeSpecName "kube-api-access-5hwkv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.785157 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl" (OuterVolumeSpecName: "kube-api-access-r5zgl") pod "8b0f63e5-fce3-4604-9212-2d951a310c81" (UID: "8b0f63e5-fce3-4604-9212-2d951a310c81"). InnerVolumeSpecName "kube-api-access-r5zgl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.882169 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hwkv\" (UniqueName: \"kubernetes.io/projected/3ab8a197-adc0-43a7-b416-8d4770b1b646-kube-api-access-5hwkv\") on node \"crc\" DevicePath \"\""
Oct 03 15:50:28 crc kubenswrapper[5081]: I1003 15:50:28.882223 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5zgl\" (UniqueName: \"kubernetes.io/projected/8b0f63e5-fce3-4604-9212-2d951a310c81-kube-api-access-r5zgl\") on node \"crc\" DevicePath \"\""
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.183837 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-49e1-account-create-9cvsx" event={"ID":"5aeecca0-1628-4103-ba8b-5dcf3bb564e3","Type":"ContainerDied","Data":"f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d"}
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.183885 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2d329cdc0b642c562325abc1288328b6dee3ac61afff84f4f55ff0f34d9523d"
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.183952 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-49e1-account-create-9cvsx"
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.199817 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c1ef-account-create-k8xb5"
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.200189 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c1ef-account-create-k8xb5" event={"ID":"8b0f63e5-fce3-4604-9212-2d951a310c81","Type":"ContainerDied","Data":"d6530da6c7c2ba3fa189b455c22dff697caeb17501d208dab4c9469da4d61204"}
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.200229 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6530da6c7c2ba3fa189b455c22dff697caeb17501d208dab4c9469da4d61204"
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203263 5081 generic.go:334] "Generic (PLEG): container finished" podID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerID="85a574f5c3f4d8a324158bca5c0cfc64a8553cff287eed562bece986ff5da2c5" exitCode=0
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203294 5081 generic.go:334] "Generic (PLEG): container finished" podID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerID="d21cdb79c55bc0602af3223469dfd019f84274da8806adde3ed8a3237d25a038" exitCode=2
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203304 5081 generic.go:334] "Generic (PLEG): container finished" podID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerID="f103d52a2df7e925b259c9b34913f41cda32f9c3fb12f74369c19454c456afe9" exitCode=0
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203352 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerDied","Data":"85a574f5c3f4d8a324158bca5c0cfc64a8553cff287eed562bece986ff5da2c5"}
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203387 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerDied","Data":"d21cdb79c55bc0602af3223469dfd019f84274da8806adde3ed8a3237d25a038"}
Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.203398 5081 kubelet.go:2453] "SyncLoop (PLEG):
event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerDied","Data":"f103d52a2df7e925b259c9b34913f41cda32f9c3fb12f74369c19454c456afe9"} Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.206322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c347-account-create-qnb2t" event={"ID":"3ab8a197-adc0-43a7-b416-8d4770b1b646","Type":"ContainerDied","Data":"56b263339fb44ed5f67d13ca6c46a55f014455c957f0251b26f8ff641c6d0e08"} Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.206498 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56b263339fb44ed5f67d13ca6c46a55f014455c957f0251b26f8ff641c6d0e08" Oct 03 15:50:29 crc kubenswrapper[5081]: I1003 15:50:29.206773 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c347-account-create-qnb2t" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.573129 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fmhsw"] Oct 03 15:50:30 crc kubenswrapper[5081]: E1003 15:50:30.573998 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ab8a197-adc0-43a7-b416-8d4770b1b646" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574017 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ab8a197-adc0-43a7-b416-8d4770b1b646" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: E1003 15:50:30.574040 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b0f63e5-fce3-4604-9212-2d951a310c81" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574048 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b0f63e5-fce3-4604-9212-2d951a310c81" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: E1003 15:50:30.574063 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aeecca0-1628-4103-ba8b-5dcf3bb564e3" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574071 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aeecca0-1628-4103-ba8b-5dcf3bb564e3" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574310 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aeecca0-1628-4103-ba8b-5dcf3bb564e3" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574354 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b0f63e5-fce3-4604-9212-2d951a310c81" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.574368 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ab8a197-adc0-43a7-b416-8d4770b1b646" containerName="mariadb-account-create" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.575105 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.577230 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.577291 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.577326 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.577342 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvddp\" (UniqueName: \"kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.577954 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.578073 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rz2tr" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.578986 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.586281 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fmhsw"] Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.647580 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.647834 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.678755 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " 
pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.678949 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.679035 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.679076 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvddp\" (UniqueName: \"kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.684336 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.684775 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.696367 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.697089 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvddp\" (UniqueName: \"kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp\") pod \"nova-cell0-conductor-db-sync-fmhsw\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:30 crc kubenswrapper[5081]: I1003 15:50:30.903120 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:50:31 crc kubenswrapper[5081]: I1003 15:50:31.426845 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fmhsw"] Oct 03 15:50:32 crc kubenswrapper[5081]: I1003 15:50:32.232400 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" event={"ID":"5d4651e0-3f61-4f14-8239-326b5203caf5","Type":"ContainerStarted","Data":"20c9ebe89b41aa6fe4973832e8ceccfdab795ab16e2de91f2dc98ec30bc58064"} Oct 03 15:50:35 crc kubenswrapper[5081]: I1003 15:50:35.270891 5081 generic.go:334] "Generic (PLEG): container finished" podID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerID="9fd7c4205a4a05cb8577517061e76119ae8bddb896bb64d87053d0df8027eab3" exitCode=0 Oct 03 15:50:35 crc kubenswrapper[5081]: I1003 15:50:35.270989 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerDied","Data":"9fd7c4205a4a05cb8577517061e76119ae8bddb896bb64d87053d0df8027eab3"} Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.263292 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.350866 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"637b297e-fffd-4ab0-bf29-3def6dbe2da1","Type":"ContainerDied","Data":"a4245f0323283290e8b486b53eebc004e65e960cb1b1110909d5fbf9bfde9e59"} Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.350925 5081 scope.go:117] "RemoveContainer" containerID="85a574f5c3f4d8a324158bca5c0cfc64a8553cff287eed562bece986ff5da2c5" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.350938 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.398945 5081 scope.go:117] "RemoveContainer" containerID="d21cdb79c55bc0602af3223469dfd019f84274da8806adde3ed8a3237d25a038" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.425598 5081 scope.go:117] "RemoveContainer" containerID="f103d52a2df7e925b259c9b34913f41cda32f9c3fb12f74369c19454c456afe9" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452195 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452260 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452353 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452390 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452410 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.452461 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp4n5\" (UniqueName: \"kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.453136 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.453276 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts\") pod \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\" (UID: \"637b297e-fffd-4ab0-bf29-3def6dbe2da1\") " Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.453389 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.453758 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.453782 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/637b297e-fffd-4ab0-bf29-3def6dbe2da1-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.457781 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts" (OuterVolumeSpecName: "scripts") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.457830 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5" (OuterVolumeSpecName: "kube-api-access-zp4n5") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "kube-api-access-zp4n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.458874 5081 scope.go:117] "RemoveContainer" containerID="9fd7c4205a4a05cb8577517061e76119ae8bddb896bb64d87053d0df8027eab3" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.479091 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.536291 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555450 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data" (OuterVolumeSpecName: "config-data") pod "637b297e-fffd-4ab0-bf29-3def6dbe2da1" (UID: "637b297e-fffd-4ab0-bf29-3def6dbe2da1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555827 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555851 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555863 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp4n5\" (UniqueName: \"kubernetes.io/projected/637b297e-fffd-4ab0-bf29-3def6dbe2da1-kube-api-access-zp4n5\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555874 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.555885 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/637b297e-fffd-4ab0-bf29-3def6dbe2da1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.686210 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.694430 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.717181 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:50:39 crc kubenswrapper[5081]: E1003 15:50:39.717880 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="proxy-httpd" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.717906 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="proxy-httpd" Oct 03 15:50:39 crc kubenswrapper[5081]: E1003 15:50:39.717945 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-central-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.717955 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-central-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: E1003 15:50:39.717972 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-notification-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.717981 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-notification-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: E1003 15:50:39.718002 5081 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="sg-core" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.718009 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="sg-core" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.718209 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="sg-core" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.718238 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-notification-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.718250 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="ceilometer-central-agent" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.718260 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" containerName="proxy-httpd" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.720652 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.723902 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.723911 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.725650 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.840809 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="637b297e-fffd-4ab0-bf29-3def6dbe2da1" path="/var/lib/kubelet/pods/637b297e-fffd-4ab0-bf29-3def6dbe2da1/volumes" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.860516 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.860870 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.861061 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0" Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.861174 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0" Oct 03 15:50:39 crc 
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.861255 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.861282 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrqrl\" (UniqueName: \"kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.962979 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.963886 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrqrl\" (UniqueName: \"kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.963955 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964077 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964114 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964170 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964219 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-scripts\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964492 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.964639 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.968264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.975436 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.976985 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.978254 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-scripts\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:39 crc kubenswrapper[5081]: I1003 15:50:39.979911 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrqrl\" (UniqueName: \"kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl\") pod \"ceilometer-0\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " pod="openstack/ceilometer-0"
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.053009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.362131 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" event={"ID":"5d4651e0-3f61-4f14-8239-326b5203caf5","Type":"ContainerStarted","Data":"d76846d0dd605e9d375a82084d0b21e2838e242828180e38a07f9443254f2aa1"}
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.384248 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" podStartSLOduration=2.530468737 podStartE2EDuration="10.384213685s" podCreationTimestamp="2025-10-03 15:50:30 +0000 UTC" firstStartedPulling="2025-10-03 15:50:31.432219743 +0000 UTC m=+1350.397776356" lastFinishedPulling="2025-10-03 15:50:39.285964691 +0000 UTC m=+1358.251521304" observedRunningTime="2025-10-03 15:50:40.376879754 +0000 UTC m=+1359.342436387" watchObservedRunningTime="2025-10-03 15:50:40.384213685 +0000 UTC m=+1359.349770318"
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.538637 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:40 crc kubenswrapper[5081]: W1003 15:50:40.549460 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c01283_ab95_4be2_8fdb_7948cfce70e4.slice/crio-6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046 WatchSource:0}: Error finding container 6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046: Status 404 returned error can't find the container with id 6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.552273 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 15:50:40 crc kubenswrapper[5081]: I1003 15:50:40.658508 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:50:41 crc kubenswrapper[5081]: I1003 15:50:41.373752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerStarted","Data":"6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046"}
Oct 03 15:50:42 crc kubenswrapper[5081]: I1003 15:50:42.381948 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerStarted","Data":"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30"}
Oct 03 15:50:44 crc kubenswrapper[5081]: I1003 15:50:44.401489 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerStarted","Data":"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4"}
Oct 03 15:50:48 crc kubenswrapper[5081]: I1003 15:50:48.440698 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerStarted","Data":"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436"}
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.459221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerStarted","Data":"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662"}
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.459394 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-central-agent" containerID="cri-o://67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30" gracePeriod=30
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.459704 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.460001 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="proxy-httpd" containerID="cri-o://d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662" gracePeriod=30
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.460044 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="sg-core" containerID="cri-o://906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436" gracePeriod=30
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.460076 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-notification-agent" containerID="cri-o://55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4" gracePeriod=30
Oct 03 15:50:49 crc kubenswrapper[5081]: I1003 15:50:49.479692 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.846791699 podStartE2EDuration="10.479678107s" podCreationTimestamp="2025-10-03 15:50:39 +0000 UTC" firstStartedPulling="2025-10-03 15:50:40.551716789 +0000 UTC m=+1359.517273402" lastFinishedPulling="2025-10-03 15:50:49.184603167 +0000 UTC m=+1368.150159810" observedRunningTime="2025-10-03 15:50:49.47732494 +0000 UTC m=+1368.442881553" watchObservedRunningTime="2025-10-03 15:50:49.479678107 +0000 UTC m=+1368.445234720"
Oct 03 15:50:50 crc kubenswrapper[5081]: I1003 15:50:50.469431 5081 generic.go:334] "Generic (PLEG): container finished" podID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerID="906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436" exitCode=2
Oct 03 15:50:50 crc kubenswrapper[5081]: I1003 15:50:50.469919 5081 generic.go:334] "Generic (PLEG): container finished" podID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerID="55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4" exitCode=0
Oct 03 15:50:50 crc kubenswrapper[5081]: I1003 15:50:50.469647 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436"}
Oct 03 15:50:50 crc kubenswrapper[5081]: I1003 15:50:50.469961 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4"}
Oct 03 15:50:52 crc kubenswrapper[5081]: I1003 15:50:52.489253 5081 generic.go:334] "Generic (PLEG): container finished" podID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerID="67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30" exitCode=0
Oct 03 15:50:52 crc kubenswrapper[5081]: I1003 15:50:52.489336 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30"}
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30"} Oct 03 15:51:00 crc kubenswrapper[5081]: I1003 15:51:00.647848 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:51:00 crc kubenswrapper[5081]: I1003 15:51:00.648585 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:51:00 crc kubenswrapper[5081]: I1003 15:51:00.648635 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:51:00 crc kubenswrapper[5081]: I1003 15:51:00.649429 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:51:00 crc kubenswrapper[5081]: I1003 15:51:00.649483 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16" gracePeriod=600 Oct 03 15:51:01 crc kubenswrapper[5081]: I1003 15:51:01.578442 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16" exitCode=0 Oct 03 15:51:01 crc kubenswrapper[5081]: I1003 15:51:01.578551 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16"} Oct 03 15:51:01 crc kubenswrapper[5081]: I1003 15:51:01.578867 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87"} Oct 03 15:51:01 crc kubenswrapper[5081]: I1003 15:51:01.578890 5081 scope.go:117] "RemoveContainer" containerID="2727e3568c0236fe42cc24253d506d8b40838dfef4a67a28b07229e9c0fbc979" Oct 03 15:51:02 crc kubenswrapper[5081]: I1003 15:51:02.592230 5081 generic.go:334] "Generic (PLEG): container finished" podID="5d4651e0-3f61-4f14-8239-326b5203caf5" containerID="d76846d0dd605e9d375a82084d0b21e2838e242828180e38a07f9443254f2aa1" exitCode=0 Oct 03 15:51:02 crc kubenswrapper[5081]: I1003 15:51:02.592305 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" 
event={"ID":"5d4651e0-3f61-4f14-8239-326b5203caf5","Type":"ContainerDied","Data":"d76846d0dd605e9d375a82084d0b21e2838e242828180e38a07f9443254f2aa1"} Oct 03 15:51:03 crc kubenswrapper[5081]: I1003 15:51:03.907589 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.045012 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvddp\" (UniqueName: \"kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp\") pod \"5d4651e0-3f61-4f14-8239-326b5203caf5\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.045075 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts\") pod \"5d4651e0-3f61-4f14-8239-326b5203caf5\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.045176 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle\") pod \"5d4651e0-3f61-4f14-8239-326b5203caf5\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.045263 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data\") pod \"5d4651e0-3f61-4f14-8239-326b5203caf5\" (UID: \"5d4651e0-3f61-4f14-8239-326b5203caf5\") " Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.053707 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts" (OuterVolumeSpecName: "scripts") pod "5d4651e0-3f61-4f14-8239-326b5203caf5" (UID: "5d4651e0-3f61-4f14-8239-326b5203caf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.054119 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp" (OuterVolumeSpecName: "kube-api-access-wvddp") pod "5d4651e0-3f61-4f14-8239-326b5203caf5" (UID: "5d4651e0-3f61-4f14-8239-326b5203caf5"). InnerVolumeSpecName "kube-api-access-wvddp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.082051 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d4651e0-3f61-4f14-8239-326b5203caf5" (UID: "5d4651e0-3f61-4f14-8239-326b5203caf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.129142 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data" (OuterVolumeSpecName: "config-data") pod "5d4651e0-3f61-4f14-8239-326b5203caf5" (UID: "5d4651e0-3f61-4f14-8239-326b5203caf5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.149213 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.149262 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvddp\" (UniqueName: \"kubernetes.io/projected/5d4651e0-3f61-4f14-8239-326b5203caf5-kube-api-access-wvddp\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.149280 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.149294 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4651e0-3f61-4f14-8239-326b5203caf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.613351 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" event={"ID":"5d4651e0-3f61-4f14-8239-326b5203caf5","Type":"ContainerDied","Data":"20c9ebe89b41aa6fe4973832e8ceccfdab795ab16e2de91f2dc98ec30bc58064"} Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.613410 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20c9ebe89b41aa6fe4973832e8ceccfdab795ab16e2de91f2dc98ec30bc58064" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.613475 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fmhsw" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.770538 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 15:51:04 crc kubenswrapper[5081]: E1003 15:51:04.771013 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d4651e0-3f61-4f14-8239-326b5203caf5" containerName="nova-cell0-conductor-db-sync" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.771031 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d4651e0-3f61-4f14-8239-326b5203caf5" containerName="nova-cell0-conductor-db-sync" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.771287 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d4651e0-3f61-4f14-8239-326b5203caf5" containerName="nova-cell0-conductor-db-sync" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.772233 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.774967 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rz2tr" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.774968 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.801631 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.861735 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.862190 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.862239 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsfcz\" (UniqueName: \"kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.963740 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.963810 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsfcz\" (UniqueName: \"kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.963952 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.968648 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.984546 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsfcz\" (UniqueName: \"kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz\") pod 
\"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:04 crc kubenswrapper[5081]: I1003 15:51:04.986179 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") " pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:05 crc kubenswrapper[5081]: I1003 15:51:05.094738 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:05 crc kubenswrapper[5081]: I1003 15:51:05.551056 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 15:51:05 crc kubenswrapper[5081]: I1003 15:51:05.627589 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dfe2dfd3-8b6d-466a-92f5-68e649d31298","Type":"ContainerStarted","Data":"1a13d75e73c7a71c393b87f168bb6a1bf93b244e383b010bffcb23865d895ac4"} Oct 03 15:51:06 crc kubenswrapper[5081]: I1003 15:51:06.639428 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dfe2dfd3-8b6d-466a-92f5-68e649d31298","Type":"ContainerStarted","Data":"bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db"} Oct 03 15:51:06 crc kubenswrapper[5081]: I1003 15:51:06.639870 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:06 crc kubenswrapper[5081]: I1003 15:51:06.660342 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.660324554 podStartE2EDuration="2.660324554s" podCreationTimestamp="2025-10-03 15:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:06.654157367 +0000 UTC m=+1385.619713990" watchObservedRunningTime="2025-10-03 15:51:06.660324554 +0000 UTC m=+1385.625881167" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.078279 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.129345 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.666044 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-9zm9c"] Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.667289 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.669996 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.670269 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.670511 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.670620 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.670701 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.670745 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvwsd\" (UniqueName: \"kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.729285 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9zm9c"] Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.772807 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.772858 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.772895 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.772927 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvwsd\" (UniqueName: 
\"kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.780599 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.803366 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.805024 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.821302 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvwsd\" (UniqueName: \"kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd\") pod \"nova-cell0-cell-mapping-9zm9c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.886138 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.895176 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.901525 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.949720 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.982295 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.985658 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.988702 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 15:51:10 crc kubenswrapper[5081]: I1003 15:51:10.991238 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.000919 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.033488 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.036756 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.041264 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.043531 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.082634 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh4vv\" (UniqueName: \"kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.082681 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.082716 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.082828 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.082962 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.083068 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.083192 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqsqg\" (UniqueName: \"kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.151629 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.153254 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.157447 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.185994 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4s75\" (UniqueName: \"kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186055 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186116 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqsqg\" (UniqueName: \"kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186192 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186248 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh4vv\" (UniqueName: \"kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186276 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186311 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186410 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 
03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.186438 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.187845 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.194442 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.194465 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.199946 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.204889 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.219692 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.237722 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh4vv\" (UniqueName: \"kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv\") pod \"nova-cell1-novncproxy-0\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.243506 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqsqg\" (UniqueName: \"kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg\") pod \"nova-api-0\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.279019 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.281326 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289501 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwg54\" (UniqueName: \"kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289589 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289651 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289712 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4s75\" (UniqueName: \"kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289754 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289869 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.289981 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.292840 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.293951 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.302054 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " 
pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.312021 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.317653 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4s75\" (UniqueName: \"kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75\") pod \"nova-scheduler-0\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392048 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392127 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwg54\" (UniqueName: \"kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392157 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392232 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392283 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392311 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392345 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392370 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: 
\"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392386 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392415 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhpbj\" (UniqueName: \"kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.392916 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.396974 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.397021 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.411259 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwg54\" (UniqueName: \"kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54\") pod \"nova-metadata-0\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.433516 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.479893 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494213 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494288 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494349 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494376 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494392 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.494448 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhpbj\" (UniqueName: \"kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.495342 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.498123 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.508264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 
15:51:11.509464 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.510700 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.520358 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhpbj\" (UniqueName: \"kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj\") pod \"dnsmasq-dns-66d94ddf6f-mws9m\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.531331 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.609606 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.643308 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9zm9c"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.705487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9zm9c" event={"ID":"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c","Type":"ContainerStarted","Data":"b5b7758a77e08cb74b824e3f5ceae401c33438339242fcf9757617207cbabc9c"} Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.776800 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:11 crc kubenswrapper[5081]: I1003 15:51:11.954358 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.049417 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jcbmh"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.050695 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.054347 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.054669 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.064072 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jcbmh"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.073876 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.152353 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.207472 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc7dw\" (UniqueName: \"kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.207519 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.207655 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.207759 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.225853 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.309612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.309691 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 
15:51:12.309788 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.309889 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc7dw\" (UniqueName: \"kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.315814 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.316951 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.317011 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.327063 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc7dw\" (UniqueName: \"kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw\") pod \"nova-cell1-conductor-db-sync-jcbmh\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") " pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.373089 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.714801 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2dba3895-f647-424c-bef4-7d25827a0343","Type":"ContainerStarted","Data":"899bbc4cd1b351dab196c0652fe12ed8bb6f77cb55b7894f7127b042eb4dfcd8"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.717011 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerStarted","Data":"edbaaecd51dba0f7822f6520dce9828b030e4da489152c7d580baa223a81d2a9"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.718464 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerStarted","Data":"96e5315e434534af15ce25aaea7c7bcc355a000bfe2e4a39fdbd051752694b8b"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.719704 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" event={"ID":"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7","Type":"ContainerStarted","Data":"ad1da3570a5f822c14ef8d114b524bd5a183f23fa3c7c3a20c089c4dd6128e4e"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.720580 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bbb392-2039-46d8-8719-98ef8e4c5f1b","Type":"ContainerStarted","Data":"1877dde39605820ef5e7d6cc9b37ca89f603285b3d7f84db358b274a3d6bec36"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.721735 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9zm9c" event={"ID":"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c","Type":"ContainerStarted","Data":"03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27"} Oct 03 15:51:12 crc kubenswrapper[5081]: I1003 15:51:12.899227 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jcbmh"] Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.746056 5081 generic.go:334] "Generic (PLEG): container finished" podID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerID="9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0" exitCode=0 Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.746249 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" event={"ID":"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7","Type":"ContainerDied","Data":"9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0"} Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.750514 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" event={"ID":"0487cd14-638b-44f4-9154-d5d57307f0bd","Type":"ContainerStarted","Data":"2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b"} Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.750800 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" event={"ID":"0487cd14-638b-44f4-9154-d5d57307f0bd","Type":"ContainerStarted","Data":"af80061eec5e9ec6f6732ff52f72bc0ef96a4e348b9da62d2d52b65ecab53cf6"} Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.792713 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" podStartSLOduration=1.792690624 podStartE2EDuration="1.792690624s" 
podCreationTimestamp="2025-10-03 15:51:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:13.780453203 +0000 UTC m=+1392.746009816" watchObservedRunningTime="2025-10-03 15:51:13.792690624 +0000 UTC m=+1392.758247237" Oct 03 15:51:13 crc kubenswrapper[5081]: I1003 15:51:13.804534 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-9zm9c" podStartSLOduration=3.804514364 podStartE2EDuration="3.804514364s" podCreationTimestamp="2025-10-03 15:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:13.797444571 +0000 UTC m=+1392.763001194" watchObservedRunningTime="2025-10-03 15:51:13.804514364 +0000 UTC m=+1392.770070977" Oct 03 15:51:14 crc kubenswrapper[5081]: I1003 15:51:14.736030 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:14 crc kubenswrapper[5081]: I1003 15:51:14.747994 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:16 crc kubenswrapper[5081]: I1003 15:51:16.792999 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" event={"ID":"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7","Type":"ContainerStarted","Data":"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed"} Oct 03 15:51:16 crc kubenswrapper[5081]: I1003 15:51:16.793717 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:16 crc kubenswrapper[5081]: I1003 15:51:16.814097 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" podStartSLOduration=5.8140810819999995 podStartE2EDuration="5.814081082s" podCreationTimestamp="2025-10-03 15:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:16.813129434 +0000 UTC m=+1395.778686087" watchObservedRunningTime="2025-10-03 15:51:16.814081082 +0000 UTC m=+1395.779637695" Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.812075 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bbb392-2039-46d8-8719-98ef8e4c5f1b","Type":"ContainerStarted","Data":"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"} Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.818434 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2dba3895-f647-424c-bef4-7d25827a0343","Type":"ContainerStarted","Data":"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225"} Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.818494 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="2dba3895-f647-424c-bef4-7d25827a0343" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225" gracePeriod=30 Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.831595 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.967184739 podStartE2EDuration="7.831541074s" podCreationTimestamp="2025-10-03 15:51:10 +0000 UTC" 
firstStartedPulling="2025-10-03 15:51:12.256637788 +0000 UTC m=+1391.222194401" lastFinishedPulling="2025-10-03 15:51:17.120994123 +0000 UTC m=+1396.086550736" observedRunningTime="2025-10-03 15:51:17.830999849 +0000 UTC m=+1396.796556462" watchObservedRunningTime="2025-10-03 15:51:17.831541074 +0000 UTC m=+1396.797097697" Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.851727 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.048504096 podStartE2EDuration="7.851710514s" podCreationTimestamp="2025-10-03 15:51:10 +0000 UTC" firstStartedPulling="2025-10-03 15:51:12.254505466 +0000 UTC m=+1391.220062089" lastFinishedPulling="2025-10-03 15:51:17.057711904 +0000 UTC m=+1396.023268507" observedRunningTime="2025-10-03 15:51:17.846481554 +0000 UTC m=+1396.812038187" watchObservedRunningTime="2025-10-03 15:51:17.851710514 +0000 UTC m=+1396.817267127" Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.857092 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerStarted","Data":"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976"} Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.857142 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerStarted","Data":"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa"} Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.857158 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerStarted","Data":"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"} Oct 03 15:51:17 crc kubenswrapper[5081]: I1003 15:51:17.857170 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerStarted","Data":"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be"} Oct 03 15:51:18 crc kubenswrapper[5081]: I1003 15:51:18.852013 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-log" containerID="cri-o://7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" gracePeriod=30 Oct 03 15:51:18 crc kubenswrapper[5081]: I1003 15:51:18.852076 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-metadata" containerID="cri-o://576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" gracePeriod=30 Oct 03 15:51:18 crc kubenswrapper[5081]: I1003 15:51:18.872827 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.353156744 podStartE2EDuration="7.872811812s" podCreationTimestamp="2025-10-03 15:51:11 +0000 UTC" firstStartedPulling="2025-10-03 15:51:12.267307554 +0000 UTC m=+1391.232864167" lastFinishedPulling="2025-10-03 15:51:16.786962622 +0000 UTC m=+1395.752519235" observedRunningTime="2025-10-03 15:51:18.870221427 +0000 UTC m=+1397.835778040" watchObservedRunningTime="2025-10-03 15:51:18.872811812 +0000 UTC m=+1397.838368415" Oct 03 15:51:18 crc kubenswrapper[5081]: I1003 15:51:18.898744 5081 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.460020884 podStartE2EDuration="8.898719116s" podCreationTimestamp="2025-10-03 15:51:10 +0000 UTC" firstStartedPulling="2025-10-03 15:51:12.34304384 +0000 UTC m=+1391.308600443" lastFinishedPulling="2025-10-03 15:51:16.781742062 +0000 UTC m=+1395.747298675" observedRunningTime="2025-10-03 15:51:18.889895463 +0000 UTC m=+1397.855452086" watchObservedRunningTime="2025-10-03 15:51:18.898719116 +0000 UTC m=+1397.864275749" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.422062 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:19 crc kubenswrapper[5081]: W1003 15:51:19.507127 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e589e_9d8f_46cc_92e0_ff8cc9b34ab7.slice/crio-conmon-9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e589e_9d8f_46cc_92e0_ff8cc9b34ab7.slice/crio-conmon-9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0.scope: no such file or directory Oct 03 15:51:19 crc kubenswrapper[5081]: W1003 15:51:19.507414 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e589e_9d8f_46cc_92e0_ff8cc9b34ab7.slice/crio-9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e589e_9d8f_46cc_92e0_ff8cc9b34ab7.slice/crio-9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0.scope: no such file or directory Oct 03 15:51:19 crc kubenswrapper[5081]: W1003 15:51:19.511903 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-conmon-7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-conmon-7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa.scope: no such file or directory Oct 03 15:51:19 crc kubenswrapper[5081]: W1003 15:51:19.516214 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa.scope: no such file or directory Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.582412 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle\") pod \"6b811a31-d0b5-4c76-9515-d733e1480221\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.582684 5081 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs\") pod \"6b811a31-d0b5-4c76-9515-d733e1480221\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.582754 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data\") pod \"6b811a31-d0b5-4c76-9515-d733e1480221\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.582806 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwg54\" (UniqueName: \"kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54\") pod \"6b811a31-d0b5-4c76-9515-d733e1480221\" (UID: \"6b811a31-d0b5-4c76-9515-d733e1480221\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.583292 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs" (OuterVolumeSpecName: "logs") pod "6b811a31-d0b5-4c76-9515-d733e1480221" (UID: "6b811a31-d0b5-4c76-9515-d733e1480221"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.584703 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b811a31-d0b5-4c76-9515-d733e1480221-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.590297 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54" (OuterVolumeSpecName: "kube-api-access-rwg54") pod "6b811a31-d0b5-4c76-9515-d733e1480221" (UID: "6b811a31-d0b5-4c76-9515-d733e1480221"). InnerVolumeSpecName "kube-api-access-rwg54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.620334 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data" (OuterVolumeSpecName: "config-data") pod "6b811a31-d0b5-4c76-9515-d733e1480221" (UID: "6b811a31-d0b5-4c76-9515-d733e1480221"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.621737 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b811a31-d0b5-4c76-9515-d733e1480221" (UID: "6b811a31-d0b5-4c76-9515-d733e1480221"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.687298 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.687348 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwg54\" (UniqueName: \"kubernetes.io/projected/6b811a31-d0b5-4c76-9515-d733e1480221-kube-api-access-rwg54\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.687362 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b811a31-d0b5-4c76-9515-d733e1480221-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.844074 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.876684 5081 generic.go:334] "Generic (PLEG): container finished" podID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerID="d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662" exitCode=137 Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.876853 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.877678 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662"} Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.877706 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19c01283-ab95-4be2-8fdb-7948cfce70e4","Type":"ContainerDied","Data":"6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046"} Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.877726 5081 scope.go:117] "RemoveContainer" containerID="d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885311 5081 generic.go:334] "Generic (PLEG): container finished" podID="6b811a31-d0b5-4c76-9515-d733e1480221" containerID="576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" exitCode=0 Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885341 5081 generic.go:334] "Generic (PLEG): container finished" podID="6b811a31-d0b5-4c76-9515-d733e1480221" containerID="7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" exitCode=143 Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885365 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerDied","Data":"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976"} Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885394 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerDied","Data":"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa"} Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885409 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"6b811a31-d0b5-4c76-9515-d733e1480221","Type":"ContainerDied","Data":"edbaaecd51dba0f7822f6520dce9828b030e4da489152c7d580baa223a81d2a9"} Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.885474 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.925167 5081 scope.go:117] "RemoveContainer" containerID="906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.940389 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.960809 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.966657 5081 scope.go:117] "RemoveContainer" containerID="55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971093 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971844 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-central-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971865 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-central-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971901 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="sg-core" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971910 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="sg-core" Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971944 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-log" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971950 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-log" Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971957 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-metadata" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971963 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-metadata" Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971978 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-notification-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.971984 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-notification-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: E1003 15:51:19.971996 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="proxy-httpd" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972001 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" 
containerName="proxy-httpd" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972169 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="sg-core" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972181 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-central-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972193 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="ceilometer-notification-agent" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972210 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-metadata" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972218 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" containerName="proxy-httpd" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.972227 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" containerName="nova-metadata-log" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.973410 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.975612 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.976694 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.981655 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.992695 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993165 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-scripts\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993249 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993281 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993340 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrqrl\" (UniqueName: 
\"kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993389 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993413 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle\") pod \"19c01283-ab95-4be2-8fdb-7948cfce70e4\" (UID: \"19c01283-ab95-4be2-8fdb-7948cfce70e4\") " Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.993455 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.994049 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.994310 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.996898 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl" (OuterVolumeSpecName: "kube-api-access-hrqrl") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "kube-api-access-hrqrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:19 crc kubenswrapper[5081]: I1003 15:51:19.999033 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-scripts" (OuterVolumeSpecName: "scripts") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.024741 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.068618 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.087489 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data" (OuterVolumeSpecName: "config-data") pod "19c01283-ab95-4be2-8fdb-7948cfce70e4" (UID: "19c01283-ab95-4be2-8fdb-7948cfce70e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.095654 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.095748 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.095815 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.095852 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096093 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8dts\" (UniqueName: \"kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096306 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096325 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096364 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/19c01283-ab95-4be2-8fdb-7948cfce70e4-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096378 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrqrl\" (UniqueName: \"kubernetes.io/projected/19c01283-ab95-4be2-8fdb-7948cfce70e4-kube-api-access-hrqrl\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096389 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.096399 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19c01283-ab95-4be2-8fdb-7948cfce70e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.181848 5081 scope.go:117] "RemoveContainer" containerID="67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.200385 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.200459 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.200517 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.200547 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.200590 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8dts\" (UniqueName: \"kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.201307 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.210718 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data\") pod \"nova-metadata-0\" (UID: 
\"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.217250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.220504 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.220853 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8dts\" (UniqueName: \"kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts\") pod \"nova-metadata-0\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") " pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.223101 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.230197 5081 scope.go:117] "RemoveContainer" containerID="d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.230841 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662\": container with ID starting with d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662 not found: ID does not exist" containerID="d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.230871 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662"} err="failed to get container status \"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662\": rpc error: code = NotFound desc = could not find container \"d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662\": container with ID starting with d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.230893 5081 scope.go:117] "RemoveContainer" containerID="906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.231132 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436\": container with ID starting with 906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436 not found: ID does not exist" containerID="906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231153 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436"} err="failed to get container status \"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436\": rpc error: 
code = NotFound desc = could not find container \"906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436\": container with ID starting with 906aeab7a5c3f4c23dd0a4d65903ae98bf73f06644ccd63c6a38db687a1bf436 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231165 5081 scope.go:117] "RemoveContainer" containerID="55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.231343 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4\": container with ID starting with 55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4 not found: ID does not exist" containerID="55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231362 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4"} err="failed to get container status \"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4\": rpc error: code = NotFound desc = could not find container \"55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4\": container with ID starting with 55b14f35a2fb52366d74e81bd0fee79539bb5ada28e71385406e03b2a1dd34b4 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231374 5081 scope.go:117] "RemoveContainer" containerID="67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.231779 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30\": container with ID starting with 67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30 not found: ID does not exist" containerID="67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231799 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30"} err="failed to get container status \"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30\": rpc error: code = NotFound desc = could not find container \"67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30\": container with ID starting with 67050098f2ea5ba2472109512397553dff98f3e11b7dd7c9b2359685bff89b30 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.231811 5081 scope.go:117] "RemoveContainer" containerID="576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.232905 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.262086 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.265671 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.269806 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.270004 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.278127 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.290005 5081 scope.go:117] "RemoveContainer" containerID="7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.377704 5081 scope.go:117] "RemoveContainer" containerID="576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.378197 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976\": container with ID starting with 576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976 not found: ID does not exist" containerID="576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.378247 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976"} err="failed to get container status \"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976\": rpc error: code = NotFound desc = could not find container \"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976\": container with ID starting with 576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.378275 5081 scope.go:117] "RemoveContainer" containerID="7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" Oct 03 15:51:20 crc kubenswrapper[5081]: E1003 15:51:20.379113 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa\": container with ID starting with 7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa not found: ID does not exist" containerID="7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.379142 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa"} err="failed to get container status \"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa\": rpc error: code = NotFound desc = could not find container \"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa\": container with ID starting with 7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.379156 5081 scope.go:117] "RemoveContainer" containerID="576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.379391 5081 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976"} err="failed to get container status \"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976\": rpc error: code = NotFound desc = could not find container \"576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976\": container with ID starting with 576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976 not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.379413 5081 scope.go:117] "RemoveContainer" containerID="7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.379760 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa"} err="failed to get container status \"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa\": rpc error: code = NotFound desc = could not find container \"7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa\": container with ID starting with 7fa102612b48b7c2b5ab375058f68e0a26ba78a683b8d6a9d2c6bbb3751605fa not found: ID does not exist" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.403990 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqxfb\" (UniqueName: \"kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404090 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404130 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404187 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404210 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404229 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.404274 5081 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.485106 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.506956 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.507434 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqxfb\" (UniqueName: \"kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.507512 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.507616 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.507748 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.507803 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.508634 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.508982 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.510427 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.513738 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.513943 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.515795 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.517265 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.541346 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqxfb\" (UniqueName: \"kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb\") pod \"ceilometer-0\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.671474 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.898351 5081 generic.go:334] "Generic (PLEG): container finished" podID="b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" containerID="03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27" exitCode=0 Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.898431 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9zm9c" event={"ID":"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c","Type":"ContainerDied","Data":"03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27"} Oct 03 15:51:20 crc kubenswrapper[5081]: I1003 15:51:20.942748 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.124442 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.312545 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.434567 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.434611 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.462088 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.532340 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.532382 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.614779 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.689343 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"] Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.690290 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="dnsmasq-dns" containerID="cri-o://c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635" gracePeriod=10 Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.841005 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19c01283-ab95-4be2-8fdb-7948cfce70e4" path="/var/lib/kubelet/pods/19c01283-ab95-4be2-8fdb-7948cfce70e4/volumes" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.848026 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b811a31-d0b5-4c76-9515-d733e1480221" path="/var/lib/kubelet/pods/6b811a31-d0b5-4c76-9515-d733e1480221/volumes" Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.923003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"ac01462b1ca622ec2946a71a2de8be24d616f76afa7a6eb285398b4e85510c73"} Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.925993 5081 generic.go:334] "Generic (PLEG): container 
finished" podID="32b06890-1db0-4586-8127-0be88c3d6e42" containerID="c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635" exitCode=0 Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.926084 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" event={"ID":"32b06890-1db0-4586-8127-0be88c3d6e42","Type":"ContainerDied","Data":"c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635"} Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.929975 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerStarted","Data":"aba14457cf689c0824711ed187d7d0f81415153d40fbc73ca0af560182d0dc74"} Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.930015 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerStarted","Data":"af441244db1245314e57b2639473068e1b7f451198fdc928f202487aa67bc1e2"} Oct 03 15:51:21 crc kubenswrapper[5081]: I1003 15:51:21.974828 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.505957 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9zm9c" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.520326 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.568598 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvwsd\" (UniqueName: \"kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd\") pod \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.569197 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts\") pod \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.569248 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle\") pod \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.569407 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data\") pod \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\" (UID: \"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.597477 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts" (OuterVolumeSpecName: "scripts") pod "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" (UID: "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.599597 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd" (OuterVolumeSpecName: "kube-api-access-wvwsd") pod "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" (UID: "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c"). InnerVolumeSpecName "kube-api-access-wvwsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.605851 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" (UID: "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.622705 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data" (OuterVolumeSpecName: "config-data") pod "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" (UID: "b9b4deda-b48b-4c00-8a09-fab4b2e7f95c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.624994 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.625072 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673465 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2xqb\" (UniqueName: \"kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673536 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673696 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673735 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " 
Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673805 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.673828 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc\") pod \"32b06890-1db0-4586-8127-0be88c3d6e42\" (UID: \"32b06890-1db0-4586-8127-0be88c3d6e42\") " Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.674243 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvwsd\" (UniqueName: \"kubernetes.io/projected/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-kube-api-access-wvwsd\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.674260 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.674270 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.674281 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.680618 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb" (OuterVolumeSpecName: "kube-api-access-n2xqb") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "kube-api-access-n2xqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.730230 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.742059 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.762408 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config" (OuterVolumeSpecName: "config") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.768786 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.776898 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.776939 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2xqb\" (UniqueName: \"kubernetes.io/projected/32b06890-1db0-4586-8127-0be88c3d6e42-kube-api-access-n2xqb\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.776955 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.776968 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.777015 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.797132 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "32b06890-1db0-4586-8127-0be88c3d6e42" (UID: "32b06890-1db0-4586-8127-0be88c3d6e42"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.878310 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32b06890-1db0-4586-8127-0be88c3d6e42-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.940984 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.942010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d8d5886f-sw2hj" event={"ID":"32b06890-1db0-4586-8127-0be88c3d6e42","Type":"ContainerDied","Data":"f6fe3e07305c0a00b0ab846cc0a379ddf7bda20d22ad2961d31649f3733e2b26"} Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.942049 5081 scope.go:117] "RemoveContainer" containerID="c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635" Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.944388 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.944431 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9zm9c" event={"ID":"b9b4deda-b48b-4c00-8a09-fab4b2e7f95c","Type":"ContainerDied","Data":"b5b7758a77e08cb74b824e3f5ceae401c33438339242fcf9757617207cbabc9c"}
Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.944455 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5b7758a77e08cb74b824e3f5ceae401c33438339242fcf9757617207cbabc9c"
Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.947418 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerStarted","Data":"e4aadb07471fab375989d47e2dfc014984c5989cf6d9621542c6639b837176ac"}
Oct 03 15:51:22 crc kubenswrapper[5081]: I1003 15:51:22.994661 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.994640737 podStartE2EDuration="3.994640737s" podCreationTimestamp="2025-10-03 15:51:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:22.981727136 +0000 UTC m=+1401.947283749" watchObservedRunningTime="2025-10-03 15:51:22.994640737 +0000 UTC m=+1401.960197350"
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.014049 5081 scope.go:117] "RemoveContainer" containerID="63f8fcb4ff665dedcfbfc051a978fa3194e0487aaf89da56acd023add4fdef5d"
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.044494 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.044728 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-log" containerID="cri-o://283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be" gracePeriod=30
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.044865 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-api" containerID="cri-o://331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2" gracePeriod=30
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.070591 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"]
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.083826 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77d8d5886f-sw2hj"]
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.095697 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.113484 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.838025 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" path="/var/lib/kubelet/pods/32b06890-1db0-4586-8127-0be88c3d6e42/volumes"
Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.958441 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8"}
event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8"} Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.960911 5081 generic.go:334] "Generic (PLEG): container finished" podID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerID="283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be" exitCode=143 Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.960947 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerDied","Data":"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be"} Oct 03 15:51:23 crc kubenswrapper[5081]: I1003 15:51:23.961070 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" containerName="nova-scheduler-scheduler" containerID="cri-o://5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979" gracePeriod=30 Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.958968 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.970425 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737"} Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.972439 5081 generic.go:334] "Generic (PLEG): container finished" podID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" containerID="5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979" exitCode=0 Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.972655 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-log" containerID="cri-o://aba14457cf689c0824711ed187d7d0f81415153d40fbc73ca0af560182d0dc74" gracePeriod=30 Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.972909 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.975020 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bbb392-2039-46d8-8719-98ef8e4c5f1b","Type":"ContainerDied","Data":"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"}
Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.975060 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bbb392-2039-46d8-8719-98ef8e4c5f1b","Type":"ContainerDied","Data":"1877dde39605820ef5e7d6cc9b37ca89f603285b3d7f84db358b274a3d6bec36"}
Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.975079 5081 scope.go:117] "RemoveContainer" containerID="5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"
Oct 03 15:51:24 crc kubenswrapper[5081]: I1003 15:51:24.975176 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-metadata" containerID="cri-o://e4aadb07471fab375989d47e2dfc014984c5989cf6d9621542c6639b837176ac" gracePeriod=30
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.008631 5081 scope.go:117] "RemoveContainer" containerID="5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"
Oct 03 15:51:25 crc kubenswrapper[5081]: E1003 15:51:25.009103 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979\": container with ID starting with 5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979 not found: ID does not exist" containerID="5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.009146 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979"} err="failed to get container status \"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979\": rpc error: code = NotFound desc = could not find container \"5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979\": container with ID starting with 5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979 not found: ID does not exist"
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.021480 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data\") pod \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") "
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.021760 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle\") pod \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") "
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.021813 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4s75\" (UniqueName: \"kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75\") pod \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\" (UID: \"e2bbb392-2039-46d8-8719-98ef8e4c5f1b\") "
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.032818 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75" (OuterVolumeSpecName: "kube-api-access-v4s75") pod "e2bbb392-2039-46d8-8719-98ef8e4c5f1b" (UID: "e2bbb392-2039-46d8-8719-98ef8e4c5f1b"). InnerVolumeSpecName "kube-api-access-v4s75". PluginName "kubernetes.io/projected", VolumeGidValue ""
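The E-level records above are benign: the second RemoveContainer raced with CRI-O's own cleanup, so when the kubelet asked for the container's status the runtime answered with gRPC NotFound, and the kubelet carries on because the desired state (container gone) already holds. A sketch of that NotFound-tolerant pattern, with removeContainer as a hypothetical stand-in for a CRI RPC rather than the real kubelet call chain:

```go
// Illustrative only: treat gRPC NotFound from a delete as success,
// mirroring the "DeleteContainer returned error ... not found" records.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func removeContainer(id string) error {
	// Simulate the race seen in the log: the runtime already removed it.
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	err := removeContainer("5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979")
	if status.Code(err) == codes.NotFound {
		// Already gone: the deletion is idempotent, so don't retry.
		fmt.Println("already removed; ignoring NotFound")
		return
	}
	if err != nil {
		fmt.Println("transient delete failure:", err)
	}
}
```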
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.053463 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data" (OuterVolumeSpecName: "config-data") pod "e2bbb392-2039-46d8-8719-98ef8e4c5f1b" (UID: "e2bbb392-2039-46d8-8719-98ef8e4c5f1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.054249 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2bbb392-2039-46d8-8719-98ef8e4c5f1b" (UID: "e2bbb392-2039-46d8-8719-98ef8e4c5f1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.125431 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.125485 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4s75\" (UniqueName: \"kubernetes.io/projected/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-kube-api-access-v4s75\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.125499 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bbb392-2039-46d8-8719-98ef8e4c5f1b-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.302048 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.313246 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.325239 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:51:25 crc kubenswrapper[5081]: E1003 15:51:25.325655 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="dnsmasq-dns"
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.325672 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="dnsmasq-dns"
Oct 03 15:51:25 crc kubenswrapper[5081]: E1003 15:51:25.325699 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" containerName="nova-scheduler-scheduler"
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.325706 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" containerName="nova-scheduler-scheduler"
Oct 03 15:51:25 crc kubenswrapper[5081]: E1003 15:51:25.325716 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" containerName="nova-manage"
containerName="nova-manage" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.325722 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" containerName="nova-manage" Oct 03 15:51:25 crc kubenswrapper[5081]: E1003 15:51:25.325742 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="init" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.325748 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="init" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.326037 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" containerName="nova-scheduler-scheduler" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.326070 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" containerName="nova-manage" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.326080 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="32b06890-1db0-4586-8127-0be88c3d6e42" containerName="dnsmasq-dns" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.326655 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.333403 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.367783 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.430233 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfrdf\" (UniqueName: \"kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.430625 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.430731 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.485642 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.485699 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.532778 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfrdf\" (UniqueName: \"kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " 
pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.532957 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.533002 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.538735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.551338 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.551764 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfrdf\" (UniqueName: \"kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf\") pod \"nova-scheduler-0\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " pod="openstack/nova-scheduler-0" Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.694050 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.861528 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2bbb392-2039-46d8-8719-98ef8e4c5f1b" path="/var/lib/kubelet/pods/e2bbb392-2039-46d8-8719-98ef8e4c5f1b/volumes"
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.983859 5081 generic.go:334] "Generic (PLEG): container finished" podID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerID="e4aadb07471fab375989d47e2dfc014984c5989cf6d9621542c6639b837176ac" exitCode=0
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.983886 5081 generic.go:334] "Generic (PLEG): container finished" podID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerID="aba14457cf689c0824711ed187d7d0f81415153d40fbc73ca0af560182d0dc74" exitCode=143
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.983921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerDied","Data":"e4aadb07471fab375989d47e2dfc014984c5989cf6d9621542c6639b837176ac"}
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.983946 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerDied","Data":"aba14457cf689c0824711ed187d7d0f81415153d40fbc73ca0af560182d0dc74"}
Oct 03 15:51:25 crc kubenswrapper[5081]: I1003 15:51:25.992025 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6"}
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.114761 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.178525 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.247838 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs\") pod \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") "
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.247907 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle\") pod \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") "
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.247974 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs\") pod \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") "
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.248002 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data\") pod \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") "
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.248062 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8dts\" (UniqueName: \"kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts\") pod \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\" (UID: \"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e\") "
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.248232 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs" (OuterVolumeSpecName: "logs") pod "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" (UID: "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.248616 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-logs\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.252055 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts" (OuterVolumeSpecName: "kube-api-access-c8dts") pod "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" (UID: "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e"). InnerVolumeSpecName "kube-api-access-c8dts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.278468 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" (UID: "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.281324 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data" (OuterVolumeSpecName: "config-data") pod "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" (UID: "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.309489 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" (UID: "ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.350452 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8dts\" (UniqueName: \"kubernetes.io/projected/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-kube-api-access-c8dts\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.350496 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.350510 5081 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.350523 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.626038 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"] Oct 03 15:51:26 crc kubenswrapper[5081]: E1003 15:51:26.627186 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-metadata" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.627293 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-metadata" Oct 03 15:51:26 crc kubenswrapper[5081]: E1003 15:51:26.627335 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-log" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.627345 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-log" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.627627 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-log" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.627666 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" containerName="nova-metadata-metadata" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.629694 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.644062 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"]
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.757586 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.757663 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.757745 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.860103 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.860299 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.860344 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.860965 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.861587 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.879226 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h"
\"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") pod \"redhat-operators-zmn5h\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:26 crc kubenswrapper[5081]: I1003 15:51:26.957463 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.013309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"82268010-ad26-4d43-9789-ca15b8e3e394","Type":"ContainerStarted","Data":"6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c"} Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.013382 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"82268010-ad26-4d43-9789-ca15b8e3e394","Type":"ContainerStarted","Data":"b3178aeb75e883a283ca3319eb1c7cc4f1fd6765d2c0e186b9fbb59741b7d6d3"} Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.016657 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e","Type":"ContainerDied","Data":"af441244db1245314e57b2639473068e1b7f451198fdc928f202487aa67bc1e2"} Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.016713 5081 scope.go:117] "RemoveContainer" containerID="e4aadb07471fab375989d47e2dfc014984c5989cf6d9621542c6639b837176ac" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.016874 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.035214 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.035195405 podStartE2EDuration="2.035195405s" podCreationTimestamp="2025-10-03 15:51:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:27.031492579 +0000 UTC m=+1405.997049202" watchObservedRunningTime="2025-10-03 15:51:27.035195405 +0000 UTC m=+1406.000752018" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.088261 5081 scope.go:117] "RemoveContainer" containerID="aba14457cf689c0824711ed187d7d0f81415153d40fbc73ca0af560182d0dc74" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.101882 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.118326 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.135766 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.138052 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.144782 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.152060 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.170393 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.271784 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s77b4\" (UniqueName: \"kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.271847 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.271923 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.271957 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.271978 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.373285 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.373846 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.373999 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s77b4\" (UniqueName: \"kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0"
\"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.374065 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.374148 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.374649 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.381220 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.385577 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.392338 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s77b4\" (UniqueName: \"kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.395136 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " pod="openstack/nova-metadata-0" Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.466180 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.488664 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"]
Oct 03 15:51:27 crc kubenswrapper[5081]: I1003 15:51:27.844231 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e" path="/var/lib/kubelet/pods/ca50ca0e-1baf-4b3c-9348-eb60ee4bf46e/volumes"
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.004055 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.027441 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerStarted","Data":"74d9beafe8806621f4279f2792aa5a565bfeb0e5c661683dc098d348ca1f440d"}
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.029761 5081 generic.go:334] "Generic (PLEG): container finished" podID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerID="c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1" exitCode=0
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.029832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerDied","Data":"c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1"}
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.029856 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerStarted","Data":"f2a4e15bc679e4692cc2a18144f6083920869c81f09e3f5703a9e103177f050e"}
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.045735 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerStarted","Data":"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357"}
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.045904 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 03 15:51:28 crc kubenswrapper[5081]: I1003 15:51:28.095040 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.562110744 podStartE2EDuration="8.095015296s" podCreationTimestamp="2025-10-03 15:51:20 +0000 UTC" firstStartedPulling="2025-10-03 15:51:21.132205498 +0000 UTC m=+1400.097762111" lastFinishedPulling="2025-10-03 15:51:27.66511005 +0000 UTC m=+1406.630666663" observedRunningTime="2025-10-03 15:51:28.092929156 +0000 UTC m=+1407.058485759" watchObservedRunningTime="2025-10-03 15:51:28.095015296 +0000 UTC m=+1407.060571909"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.043002 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.056660 5081 generic.go:334] "Generic (PLEG): container finished" podID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerID="331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2" exitCode=0
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.056729 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.056738 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerDied","Data":"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"}
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.056849 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399","Type":"ContainerDied","Data":"96e5315e434534af15ce25aaea7c7bcc355a000bfe2e4a39fdbd051752694b8b"}
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.056867 5081 scope.go:117] "RemoveContainer" containerID="331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.061709 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerStarted","Data":"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c"}
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.061746 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerStarted","Data":"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40"}
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.096817 5081 scope.go:117] "RemoveContainer" containerID="283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.101422 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.10140408 podStartE2EDuration="2.10140408s" podCreationTimestamp="2025-10-03 15:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:29.091084394 +0000 UTC m=+1408.056641037" watchObservedRunningTime="2025-10-03 15:51:29.10140408 +0000 UTC m=+1408.066960693"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.117454 5081 scope.go:117] "RemoveContainer" containerID="331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"
Oct 03 15:51:29 crc kubenswrapper[5081]: E1003 15:51:29.117968 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2\": container with ID starting with 331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2 not found: ID does not exist" containerID="331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.118006 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2"} err="failed to get container status \"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2\": rpc error: code = NotFound desc = could not find container \"331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2\": container with ID starting with 331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2 not found: ID does not exist"
Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.118034 5081 scope.go:117] "RemoveContainer" containerID="283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be"
containerID="283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be" Oct 03 15:51:29 crc kubenswrapper[5081]: E1003 15:51:29.118315 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be\": container with ID starting with 283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be not found: ID does not exist" containerID="283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.118349 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be"} err="failed to get container status \"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be\": rpc error: code = NotFound desc = could not find container \"283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be\": container with ID starting with 283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be not found: ID does not exist" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.120106 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle\") pod \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.120198 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs\") pod \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.120293 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data\") pod \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.120354 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqsqg\" (UniqueName: \"kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg\") pod \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\" (UID: \"a66f4ab1-bd4d-403e-bc90-bfdb9d66b399\") " Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.120869 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs" (OuterVolumeSpecName: "logs") pod "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" (UID: "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.131058 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg" (OuterVolumeSpecName: "kube-api-access-mqsqg") pod "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" (UID: "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399"). InnerVolumeSpecName "kube-api-access-mqsqg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.147276 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" (UID: "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.148133 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data" (OuterVolumeSpecName: "config-data") pod "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" (UID: "a66f4ab1-bd4d-403e-bc90-bfdb9d66b399"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.222350 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqsqg\" (UniqueName: \"kubernetes.io/projected/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-kube-api-access-mqsqg\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.222384 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.222393 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.222401 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.389475 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.398075 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.413631 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:29 crc kubenswrapper[5081]: E1003 15:51:29.414552 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-log" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.414593 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-log" Oct 03 15:51:29 crc kubenswrapper[5081]: E1003 15:51:29.414619 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-api" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.414629 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-api" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.414900 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-api" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.414922 5081 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" containerName="nova-api-log" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.416394 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.419006 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.424890 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.527452 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.527639 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.527700 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mdrn\" (UniqueName: \"kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.527801 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.629051 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.629112 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mdrn\" (UniqueName: \"kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.629168 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.629236 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.629773 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.634958 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.638897 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.647316 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mdrn\" (UniqueName: \"kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn\") pod \"nova-api-0\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.743059 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:29 crc kubenswrapper[5081]: I1003 15:51:29.839007 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a66f4ab1-bd4d-403e-bc90-bfdb9d66b399" path="/var/lib/kubelet/pods/a66f4ab1-bd4d-403e-bc90-bfdb9d66b399/volumes" Oct 03 15:51:30 crc kubenswrapper[5081]: I1003 15:51:30.694840 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 15:51:30 crc kubenswrapper[5081]: I1003 15:51:30.807773 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:31 crc kubenswrapper[5081]: I1003 15:51:31.080782 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerStarted","Data":"eac9124ace114c38dfd8b03d753df510607e828f4eda79f015a82ea8677cad2e"} Oct 03 15:51:31 crc kubenswrapper[5081]: I1003 15:51:31.083606 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerStarted","Data":"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c"} Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.093902 5081 generic.go:334] "Generic (PLEG): container finished" podID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerID="08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c" exitCode=0 Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.093959 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerDied","Data":"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c"} Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.097413 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerStarted","Data":"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d"} Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.097461 5081 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerStarted","Data":"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076"} Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.136346 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.136324836 podStartE2EDuration="3.136324836s" podCreationTimestamp="2025-10-03 15:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:32.130700275 +0000 UTC m=+1411.096256908" watchObservedRunningTime="2025-10-03 15:51:32.136324836 +0000 UTC m=+1411.101881469" Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.467096 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:51:32 crc kubenswrapper[5081]: I1003 15:51:32.467147 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:51:34 crc kubenswrapper[5081]: I1003 15:51:34.119630 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerStarted","Data":"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b"} Oct 03 15:51:34 crc kubenswrapper[5081]: I1003 15:51:34.147888 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zmn5h" podStartSLOduration=3.1009939380000002 podStartE2EDuration="8.14786815s" podCreationTimestamp="2025-10-03 15:51:26 +0000 UTC" firstStartedPulling="2025-10-03 15:51:28.031468739 +0000 UTC m=+1406.997025352" lastFinishedPulling="2025-10-03 15:51:33.078342951 +0000 UTC m=+1412.043899564" observedRunningTime="2025-10-03 15:51:34.14022212 +0000 UTC m=+1413.105778733" watchObservedRunningTime="2025-10-03 15:51:34.14786815 +0000 UTC m=+1413.113424763" Oct 03 15:51:35 crc kubenswrapper[5081]: I1003 15:51:35.695073 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 15:51:35 crc kubenswrapper[5081]: I1003 15:51:35.723286 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 15:51:36 crc kubenswrapper[5081]: I1003 15:51:36.138692 5081 generic.go:334] "Generic (PLEG): container finished" podID="0487cd14-638b-44f4-9154-d5d57307f0bd" containerID="2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b" exitCode=0 Oct 03 15:51:36 crc kubenswrapper[5081]: I1003 15:51:36.138755 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" event={"ID":"0487cd14-638b-44f4-9154-d5d57307f0bd","Type":"ContainerDied","Data":"2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b"} Oct 03 15:51:36 crc kubenswrapper[5081]: I1003 15:51:36.193358 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 15:51:36 crc kubenswrapper[5081]: I1003 15:51:36.957972 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:36 crc kubenswrapper[5081]: I1003 15:51:36.958026 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:37 crc 
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.471049 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.471724 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.538273 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jcbmh"
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.580703 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc7dw\" (UniqueName: \"kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw\") pod \"0487cd14-638b-44f4-9154-d5d57307f0bd\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") "
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.580811 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts\") pod \"0487cd14-638b-44f4-9154-d5d57307f0bd\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") "
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.581054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data\") pod \"0487cd14-638b-44f4-9154-d5d57307f0bd\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") "
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.581097 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle\") pod \"0487cd14-638b-44f4-9154-d5d57307f0bd\" (UID: \"0487cd14-638b-44f4-9154-d5d57307f0bd\") "
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.589989 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw" (OuterVolumeSpecName: "kube-api-access-pc7dw") pod "0487cd14-638b-44f4-9154-d5d57307f0bd" (UID: "0487cd14-638b-44f4-9154-d5d57307f0bd"). InnerVolumeSpecName "kube-api-access-pc7dw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.590265 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts" (OuterVolumeSpecName: "scripts") pod "0487cd14-638b-44f4-9154-d5d57307f0bd" (UID: "0487cd14-638b-44f4-9154-d5d57307f0bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.621534 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data" (OuterVolumeSpecName: "config-data") pod "0487cd14-638b-44f4-9154-d5d57307f0bd" (UID: "0487cd14-638b-44f4-9154-d5d57307f0bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.628113 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0487cd14-638b-44f4-9154-d5d57307f0bd" (UID: "0487cd14-638b-44f4-9154-d5d57307f0bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.683214 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.683256 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.683267 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc7dw\" (UniqueName: \"kubernetes.io/projected/0487cd14-638b-44f4-9154-d5d57307f0bd-kube-api-access-pc7dw\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:37 crc kubenswrapper[5081]: I1003 15:51:37.683275 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0487cd14-638b-44f4-9154-d5d57307f0bd-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.002892 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zmn5h" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="registry-server" probeResult="failure" output=<
Oct 03 15:51:38 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s
Oct 03 15:51:38 crc kubenswrapper[5081]: >
Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.159929 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" event={"ID":"0487cd14-638b-44f4-9154-d5d57307f0bd","Type":"ContainerDied","Data":"af80061eec5e9ec6f6732ff52f72bc0ef96a4e348b9da62d2d52b65ecab53cf6"}
Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.159981 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af80061eec5e9ec6f6732ff52f72bc0ef96a4e348b9da62d2d52b65ecab53cf6"
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jcbmh" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.239122 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 15:51:38 crc kubenswrapper[5081]: E1003 15:51:38.239678 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0487cd14-638b-44f4-9154-d5d57307f0bd" containerName="nova-cell1-conductor-db-sync" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.239699 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0487cd14-638b-44f4-9154-d5d57307f0bd" containerName="nova-cell1-conductor-db-sync" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.239903 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0487cd14-638b-44f4-9154-d5d57307f0bd" containerName="nova-cell1-conductor-db-sync" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.240704 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.244184 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.254121 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.294946 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.295115 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.295333 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smv7r\" (UniqueName: \"kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.396862 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.396952 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.396989 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smv7r\" (UniqueName: 
\"kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.400632 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.410352 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.446151 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smv7r\" (UniqueName: \"kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r\") pod \"nova-cell1-conductor-0\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.480800 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.480800 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:38 crc kubenswrapper[5081]: I1003 15:51:38.558511 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:39 crc kubenswrapper[5081]: I1003 15:51:39.023842 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 15:51:39 crc kubenswrapper[5081]: I1003 15:51:39.173921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f1151206-1989-4b3a-bc02-176a6f3cf481","Type":"ContainerStarted","Data":"228fa2f1c9585705ded2b3e1324e46b5764f869d03351d493b4374d44258995a"} Oct 03 15:51:39 crc kubenswrapper[5081]: I1003 15:51:39.744239 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:51:39 crc kubenswrapper[5081]: I1003 15:51:39.744302 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:51:40 crc kubenswrapper[5081]: I1003 15:51:40.196810 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f1151206-1989-4b3a-bc02-176a6f3cf481","Type":"ContainerStarted","Data":"8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3"} Oct 03 15:51:40 crc kubenswrapper[5081]: I1003 15:51:40.198099 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:40 crc kubenswrapper[5081]: I1003 15:51:40.214409 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.214392907 podStartE2EDuration="2.214392907s" podCreationTimestamp="2025-10-03 15:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:40.212627717 +0000 UTC m=+1419.178184330" watchObservedRunningTime="2025-10-03 15:51:40.214392907 +0000 UTC m=+1419.179949520" Oct 03 15:51:40 crc kubenswrapper[5081]: I1003 15:51:40.828951 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:40 crc kubenswrapper[5081]: I1003 15:51:40.829434 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:51:46 crc kubenswrapper[5081]: E1003 15:51:46.265600 5081 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/4dce3cb63b4eaf92b0b2784eed0b73625480bac0eead8504ebecba80c31a3154/diff" to get inode usage: stat /var/lib/containers/storage/overlay/4dce3cb63b4eaf92b0b2784eed0b73625480bac0eead8504ebecba80c31a3154/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_dnsmasq-dns-77d8d5886f-sw2hj_32b06890-1db0-4586-8127-0be88c3d6e42/dnsmasq-dns/0.log" to get inode usage: stat /var/log/pods/openstack_dnsmasq-dns-77d8d5886f-sw2hj_32b06890-1db0-4586-8127-0be88c3d6e42/dnsmasq-dns/0.log: no such file or directory Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.342515 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:51:46 crc 
kubenswrapper[5081]: I1003 15:51:46.344819 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.358195 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.461873 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.461931 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92rnk\" (UniqueName: \"kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.462017 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.563763 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.563815 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92rnk\" (UniqueName: \"kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.563856 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.564377 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.564725 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 
03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.586061 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92rnk\" (UniqueName: \"kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk\") pod \"community-operators-znjbw\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:46 crc kubenswrapper[5081]: I1003 15:51:46.681962 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.021630 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.071417 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.220324 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.277546 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerStarted","Data":"61ad3c79212bdb2bdb8f4fae967af0edf0046f0379322cadb3339914d14992b1"} Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.473313 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.473402 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.478630 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 15:51:47 crc kubenswrapper[5081]: I1003 15:51:47.479171 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.862258 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-conmon-576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-conmon-576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976.scope: no such file or directory Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.862330 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-conmon-331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-conmon-331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2.scope: no such file or directory Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.862355 5081 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-576dc4eccddf5388ac0a74c0709c4a2952b4036b7c9f585fdc12138d3b164976.scope: no such file or directory Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.862395 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-331408bf0ba27d1d1c9e538dbe7e4b7488a90aa5bb575709b500c1f3fb76deb2.scope: no such file or directory Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.869639 5081 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca50ca0e_1baf_4b3c_9348_eb60ee4bf46e.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca50ca0e_1baf_4b3c_9348_eb60ee4bf46e.slice: no such file or directory Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.882313 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be.scope WatchSource:0}: Error finding container 283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be: Status 404 returned error can't find the container with id 283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be Oct 03 15:51:47 crc kubenswrapper[5081]: W1003 15:51:47.882510 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bbb392_2039_46d8_8719_98ef8e4c5f1b.slice/crio-5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979.scope WatchSource:0}: Error finding container 5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979: Status 404 returned error can't find the container with id 5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979 Oct 03 15:51:48 crc kubenswrapper[5081]: E1003 15:51:48.124246 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c01283_ab95_4be2_8fdb_7948cfce70e4.slice/crio-conmon-d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bbb392_2039_46d8_8719_98ef8e4c5f1b.slice/crio-1877dde39605820ef5e7d6cc9b37ca89f603285b3d7f84db358b274a3d6bec36\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0487cd14_638b_44f4_9154_d5d57307f0bd.slice/crio-conmon-2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32b06890_1db0_4586_8127_0be88c3d6e42.slice/crio-conmon-c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-96e5315e434534af15ce25aaea7c7bcc355a000bfe2e4a39fdbd051752694b8b\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9b4deda_b48b_4c00_8a09_fab4b2e7f95c.slice/crio-conmon-03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32b06890_1db0_4586_8127_0be88c3d6e42.slice/crio-c14bf9fdfb6fe02b68250b4260c28613b3a6d7323e3eb76068e4d36b0f430635.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c01283_ab95_4be2_8fdb_7948cfce70e4.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b811a31_d0b5_4c76_9515_d733e1480221.slice/crio-edbaaecd51dba0f7822f6520dce9828b030e4da489152c7d580baa223a81d2a9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dba3895_f647_424c_bef4_7d25827a0343.slice/crio-conmon-79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bbb392_2039_46d8_8719_98ef8e4c5f1b.slice/crio-conmon-5d2f8e3e7bc68a9c6a2752a55469db0ae57d4724634ba230575fbe7539673979.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bbb392_2039_46d8_8719_98ef8e4c5f1b.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c01283_ab95_4be2_8fdb_7948cfce70e4.slice/crio-d3d14e0cab3222af1e702fe508bc53e1310b71f00f1bc20fb1d23e231ade2662.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32b06890_1db0_4586_8127_0be88c3d6e42.slice/crio-f6fe3e07305c0a00b0ab846cc0a379ddf7bda20d22ad2961d31649f3733e2b26\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c01283_ab95_4be2_8fdb_7948cfce70e4.slice/crio-6d264089a0c5db12147c742cd6ef46bf03e00683528a315a99e5ed00f992f046\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0487cd14_638b_44f4_9154_d5d57307f0bd.slice/crio-af80061eec5e9ec6f6732ff52f72bc0ef96a4e348b9da62d2d52b65ecab53cf6\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66f4ab1_bd4d_403e_bc90_bfdb9d66b399.slice/crio-conmon-283bae0974545525ab84bcffb54544c73df2fbcffae74ab72df4239b71c394be.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dba3895_f647_424c_bef4_7d25827a0343.slice/crio-79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9b4deda_b48b_4c00_8a09_fab4b2e7f95c.slice/crio-03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0487cd14_638b_44f4_9154_d5d57307f0bd.slice/crio-2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32b06890_1db0_4586_8127_0be88c3d6e42.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9b4deda_b48b_4c00_8a09_fab4b2e7f95c.slice/crio-b5b7758a77e08cb74b824e3f5ceae401c33438339242fcf9757617207cbabc9c\": RecentStats: unable to find data in memory cache]" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.263905 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.293098 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerID="8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061" exitCode=0 Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.293189 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerDied","Data":"8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061"} Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.293830 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data\") pod \"2dba3895-f647-424c-bef4-7d25827a0343\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.293898 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle\") pod \"2dba3895-f647-424c-bef4-7d25827a0343\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.293946 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh4vv\" (UniqueName: \"kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv\") pod \"2dba3895-f647-424c-bef4-7d25827a0343\" (UID: \"2dba3895-f647-424c-bef4-7d25827a0343\") " Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301442 5081 generic.go:334] "Generic (PLEG): container finished" podID="2dba3895-f647-424c-bef4-7d25827a0343" containerID="79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225" exitCode=137 Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301509 5081 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301578 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2dba3895-f647-424c-bef4-7d25827a0343","Type":"ContainerDied","Data":"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225"} Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2dba3895-f647-424c-bef4-7d25827a0343","Type":"ContainerDied","Data":"899bbc4cd1b351dab196c0652fe12ed8bb6f77cb55b7894f7127b042eb4dfcd8"} Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301625 5081 scope.go:117] "RemoveContainer" containerID="79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.301835 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv" (OuterVolumeSpecName: "kube-api-access-wh4vv") pod "2dba3895-f647-424c-bef4-7d25827a0343" (UID: "2dba3895-f647-424c-bef4-7d25827a0343"). InnerVolumeSpecName "kube-api-access-wh4vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.329470 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2dba3895-f647-424c-bef4-7d25827a0343" (UID: "2dba3895-f647-424c-bef4-7d25827a0343"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.330628 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data" (OuterVolumeSpecName: "config-data") pod "2dba3895-f647-424c-bef4-7d25827a0343" (UID: "2dba3895-f647-424c-bef4-7d25827a0343"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.395962 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.396012 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dba3895-f647-424c-bef4-7d25827a0343-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.396025 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh4vv\" (UniqueName: \"kubernetes.io/projected/2dba3895-f647-424c-bef4-7d25827a0343-kube-api-access-wh4vv\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.397850 5081 scope.go:117] "RemoveContainer" containerID="79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225" Oct 03 15:51:48 crc kubenswrapper[5081]: E1003 15:51:48.398314 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225\": container with ID starting with 79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225 not found: ID does not exist" containerID="79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.398344 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225"} err="failed to get container status \"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225\": rpc error: code = NotFound desc = could not find container \"79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225\": container with ID starting with 79c9d523cae6fbacc50448526648c28e13e75ef5f8ca9b6d640069b59aef4225 not found: ID does not exist" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.595390 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.640434 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.656640 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.668379 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:48 crc kubenswrapper[5081]: E1003 15:51:48.668930 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dba3895-f647-424c-bef4-7d25827a0343" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.668956 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dba3895-f647-424c-bef4-7d25827a0343" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.669190 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dba3895-f647-424c-bef4-7d25827a0343" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.669972 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.675034 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.675272 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.675335 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.682610 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.703618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmdhx\" (UniqueName: \"kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.703693 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.703718 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.703769 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.703811 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.805325 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.805631 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 
15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.805756 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.806255 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.806477 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmdhx\" (UniqueName: \"kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.809685 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.809711 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.809895 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.810379 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.821547 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmdhx\" (UniqueName: \"kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx\") pod \"nova-cell1-novncproxy-0\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.989541 5081 util.go:30] "No sandbox for pod can be found. 
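When working with a capture like this one, it helps to split entries back into fields (journal timestamp, klog severity, source file, message) so events can be grouped per pod. A minimal Go sketch of such a parser; the format assumptions (systemd prefix followed by a klog header) match the lines above, but this is a convenience script, not part of any tooling the log refers to:

package main

import (
	"fmt"
	"regexp"
)

// Example entry in the shape used throughout this log.
var line = `Oct 03 15:51:48 crc kubenswrapper[5081]: I1003 15:51:48.821547 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume ..."`

// journal time, host, pid, klog severity (I/W/E), klog date, klog time,
// source file:line, message.
var re = regexp.MustCompile(`^(\w+ \d+ [\d:]+) (\S+) kubenswrapper\[(\d+)\]: ([IWE])(\d{4}) ([\d:.]+)\s+\d+ ([\w.]+:\d+)\] (.*)$`)

func main() {
	m := re.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match (probably a continuation line, like probe output)")
		return
	}
	fmt.Printf("time=%s host=%s severity=%s source=%s\nmsg=%s\n", m[1], m[2], m[4], m[7], m[8])
}

One caveat visible in this very section: multi-line probe outputs ("output=<" ... ">") and wrapped fields will not match the pattern and need to be folded into the preceding entry first.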
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:49 crc kubenswrapper[5081]: I1003 15:51:49.323642 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"] Oct 03 15:51:49 crc kubenswrapper[5081]: I1003 15:51:49.325661 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zmn5h" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="registry-server" containerID="cri-o://5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b" gracePeriod=2 Oct 03 15:51:49 crc kubenswrapper[5081]: I1003 15:51:49.438665 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:51:49 crc kubenswrapper[5081]: W1003 15:51:49.480167 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod155bac7b_055b_4bca_a155_f5ab13dacf80.slice/crio-c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501 WatchSource:0}: Error finding container c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501: Status 404 returned error can't find the container with id c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501 Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.748333 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.749907 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.755724 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.761025 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.809429 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.844360 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dba3895-f647-424c-bef4-7d25827a0343" path="/var/lib/kubelet/pods/2dba3895-f647-424c-bef4-7d25827a0343/volumes" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.936075 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities\") pod \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.936216 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content\") pod \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.936264 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") pod \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\" (UID: \"74f65dcb-c1bf-47cd-b29c-903fd1f4239d\") " Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.938197 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities" (OuterVolumeSpecName: "utilities") pod "74f65dcb-c1bf-47cd-b29c-903fd1f4239d" (UID: "74f65dcb-c1bf-47cd-b29c-903fd1f4239d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.938693 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:49.941938 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7" (OuterVolumeSpecName: "kube-api-access-6m8q7") pod "74f65dcb-c1bf-47cd-b29c-903fd1f4239d" (UID: "74f65dcb-c1bf-47cd-b29c-903fd1f4239d"). InnerVolumeSpecName "kube-api-access-6m8q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.014873 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74f65dcb-c1bf-47cd-b29c-903fd1f4239d" (UID: "74f65dcb-c1bf-47cd-b29c-903fd1f4239d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.040691 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.040720 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m8q7\" (UniqueName: \"kubernetes.io/projected/74f65dcb-c1bf-47cd-b29c-903fd1f4239d-kube-api-access-6m8q7\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.323273 5081 generic.go:334] "Generic (PLEG): container finished" podID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerID="5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b" exitCode=0 Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.323450 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerDied","Data":"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b"} Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.323830 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zmn5h" event={"ID":"74f65dcb-c1bf-47cd-b29c-903fd1f4239d","Type":"ContainerDied","Data":"f2a4e15bc679e4692cc2a18144f6083920869c81f09e3f5703a9e103177f050e"} Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.323848 5081 scope.go:117] "RemoveContainer" containerID="5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.323524 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zmn5h" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.328003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"155bac7b-055b-4bca-a155-f5ab13dacf80","Type":"ContainerStarted","Data":"f24a2e28a7c0ad1349c328c219041c0c090d938313a1795249776310239401b8"} Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.328026 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"155bac7b-055b-4bca-a155-f5ab13dacf80","Type":"ContainerStarted","Data":"c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501"} Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.338216 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerID="bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899" exitCode=0 Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.339754 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerDied","Data":"bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899"} Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.339790 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.352262 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.353164 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.353153614 podStartE2EDuration="2.353153614s" podCreationTimestamp="2025-10-03 15:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:50.351515927 +0000 UTC m=+1429.317072540" watchObservedRunningTime="2025-10-03 15:51:50.353153614 +0000 UTC m=+1429.318710227" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.354926 5081 scope.go:117] "RemoveContainer" containerID="08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.385760 5081 scope.go:117] "RemoveContainer" containerID="c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.400118 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"] Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.409516 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zmn5h"] Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.439154 5081 scope.go:117] "RemoveContainer" containerID="5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b" Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.439641 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b\": container with ID starting with 5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b not found: ID does not exist" containerID="5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 
15:51:50.439678 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b"} err="failed to get container status \"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b\": rpc error: code = NotFound desc = could not find container \"5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b\": container with ID starting with 5cee566f73cc11d06f5d9612aae21a2855e8c268415a11e6d99e656c0b25424b not found: ID does not exist" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.439712 5081 scope.go:117] "RemoveContainer" containerID="08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c" Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.440339 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c\": container with ID starting with 08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c not found: ID does not exist" containerID="08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.440412 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c"} err="failed to get container status \"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c\": rpc error: code = NotFound desc = could not find container \"08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c\": container with ID starting with 08da0ececa5bcbd20a7306dd85aee3f57cbf968a90fc5f1037d800942c7f818c not found: ID does not exist" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.440507 5081 scope.go:117] "RemoveContainer" containerID="c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1" Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.440947 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1\": container with ID starting with c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1 not found: ID does not exist" containerID="c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.440973 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1"} err="failed to get container status \"c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1\": rpc error: code = NotFound desc = could not find container \"c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1\": container with ID starting with c235a8cfd68450fb7cda654ba2c0f25b395f25835a896182edc83937fbcbcbc1 not found: ID does not exist" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.563356 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"] Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.564219 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="registry-server" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.564242 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" 
containerName="registry-server" Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.564294 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="extract-utilities" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.564303 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="extract-utilities" Oct 03 15:51:51 crc kubenswrapper[5081]: E1003 15:51:50.564315 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="extract-content" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.564324 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="extract-content" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.564710 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" containerName="registry-server" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.569745 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.584260 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"] Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.655715 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.655782 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.655836 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.655864 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.655974 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5ztd\" (UniqueName: \"kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.656039 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.716155 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.758458 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.758530 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.758605 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.758622 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.759332 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5ztd\" (UniqueName: \"kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.760060 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.760060 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.760296 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " 
pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.760383 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.760639 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.761060 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.779431 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5ztd\" (UniqueName: \"kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd\") pod \"dnsmasq-dns-84cbdd78c7-br9mh\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") " pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:50.901704 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:51.462928 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"] Oct 03 15:51:51 crc kubenswrapper[5081]: I1003 15:51:51.843506 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74f65dcb-c1bf-47cd-b29c-903fd1f4239d" path="/var/lib/kubelet/pods/74f65dcb-c1bf-47cd-b29c-903fd1f4239d/volumes" Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.361930 5081 generic.go:334] "Generic (PLEG): container finished" podID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerID="31ffa9f16a4b4b7ea9f2a616d344cb10ab401b43531108fcd040dc93ba817552" exitCode=0 Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.362010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" event={"ID":"ca3311fd-ac43-4729-81e1-472da7bf5878","Type":"ContainerDied","Data":"31ffa9f16a4b4b7ea9f2a616d344cb10ab401b43531108fcd040dc93ba817552"} Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.362035 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" event={"ID":"ca3311fd-ac43-4729-81e1-472da7bf5878","Type":"ContainerStarted","Data":"b3e358210a5641428baf77412ce5fd9844259ac33fc39dd3fcecd4549f57dcf0"} Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.367168 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerStarted","Data":"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0"} Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.979980 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-znjbw" podStartSLOduration=3.944035654 podStartE2EDuration="6.97995765s" podCreationTimestamp="2025-10-03 15:51:46 +0000 UTC" firstStartedPulling="2025-10-03 15:51:48.300851368 +0000 UTC m=+1427.266407981" lastFinishedPulling="2025-10-03 15:51:51.336773364 +0000 UTC m=+1430.302329977" observedRunningTime="2025-10-03 15:51:52.415825756 +0000 UTC m=+1431.381382389" watchObservedRunningTime="2025-10-03 15:51:52.97995765 +0000 UTC m=+1431.945514283" Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.990363 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.990691 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-central-agent" containerID="cri-o://3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8" gracePeriod=30 Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.990728 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="sg-core" containerID="cri-o://23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6" gracePeriod=30 Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.990758 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="proxy-httpd" containerID="cri-o://717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357" gracePeriod=30 Oct 03 15:51:52 crc kubenswrapper[5081]: I1003 15:51:52.990820 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-notification-agent" containerID="cri-o://80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737" gracePeriod=30 Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.376962 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" event={"ID":"ca3311fd-ac43-4729-81e1-472da7bf5878","Type":"ContainerStarted","Data":"d4a68c5c897302f035bc3b86f9336410e2d31a6118c0346d07b8cde661598f36"} Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.377888 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.380546 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerID="717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357" exitCode=0 Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.380587 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerID="23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6" exitCode=2 Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.380591 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerDied","Data":"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357"} Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.380639 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerDied","Data":"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6"} Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.404201 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" podStartSLOduration=3.404179423 podStartE2EDuration="3.404179423s" podCreationTimestamp="2025-10-03 15:51:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:53.399262461 +0000 UTC m=+1432.364819084" watchObservedRunningTime="2025-10-03 15:51:53.404179423 +0000 UTC m=+1432.369736036" Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.629567 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.629785 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-log" containerID="cri-o://418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076" gracePeriod=30 Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.629872 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-api" containerID="cri-o://f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d" gracePeriod=30 Oct 03 15:51:53 crc kubenswrapper[5081]: I1003 15:51:53.990760 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:54 crc kubenswrapper[5081]: I1003 15:51:54.391380 5081 generic.go:334] "Generic (PLEG): container finished" podID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerID="418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076" exitCode=143 Oct 03 15:51:54 crc kubenswrapper[5081]: I1003 15:51:54.391479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerDied","Data":"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076"} Oct 03 15:51:54 crc kubenswrapper[5081]: I1003 15:51:54.394898 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerID="3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8" exitCode=0 Oct 03 15:51:54 crc kubenswrapper[5081]: I1003 15:51:54.394954 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerDied","Data":"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8"} Oct 03 15:51:56 crc kubenswrapper[5081]: I1003 15:51:56.435486 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:56 crc kubenswrapper[5081]: I1003 15:51:56.436058 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="5e54a24e-4043-40a3-8715-bb461d3f1bde" containerName="kube-state-metrics" containerID="cri-o://39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706" gracePeriod=30 Oct 03 15:51:56 crc kubenswrapper[5081]: I1003 15:51:56.683613 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:56 crc 
kubenswrapper[5081]: I1003 15:51:56.684086 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:56 crc kubenswrapper[5081]: I1003 15:51:56.746358 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.089945 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.196823 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.207689 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb7t9\" (UniqueName: \"kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9\") pod \"5e54a24e-4043-40a3-8715-bb461d3f1bde\" (UID: \"5e54a24e-4043-40a3-8715-bb461d3f1bde\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.215090 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9" (OuterVolumeSpecName: "kube-api-access-tb7t9") pod "5e54a24e-4043-40a3-8715-bb461d3f1bde" (UID: "5e54a24e-4043-40a3-8715-bb461d3f1bde"). InnerVolumeSpecName "kube-api-access-tb7t9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.287081 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.309772 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mdrn\" (UniqueName: \"kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn\") pod \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.309830 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.309992 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310018 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs\") pod \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310046 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310067 5081 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle\") pod \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310124 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310144 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data\") pod \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\" (UID: \"e5a13d17-4b60-408a-be28-f9e8eb1d2465\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310173 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310222 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqxfb\" (UniqueName: \"kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310284 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data\") pod \"fc280ae3-abda-44e0-8a70-01165eb826fd\" (UID: \"fc280ae3-abda-44e0-8a70-01165eb826fd\") " Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310734 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb7t9\" (UniqueName: \"kubernetes.io/projected/5e54a24e-4043-40a3-8715-bb461d3f1bde-kube-api-access-tb7t9\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.310983 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.317138 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.317354 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs" (OuterVolumeSpecName: "logs") pod "e5a13d17-4b60-408a-be28-f9e8eb1d2465" (UID: "e5a13d17-4b60-408a-be28-f9e8eb1d2465"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.325761 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts" (OuterVolumeSpecName: "scripts") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.327124 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb" (OuterVolumeSpecName: "kube-api-access-hqxfb") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "kube-api-access-hqxfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.328509 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn" (OuterVolumeSpecName: "kube-api-access-4mdrn") pod "e5a13d17-4b60-408a-be28-f9e8eb1d2465" (UID: "e5a13d17-4b60-408a-be28-f9e8eb1d2465"). InnerVolumeSpecName "kube-api-access-4mdrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.350954 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data" (OuterVolumeSpecName: "config-data") pod "e5a13d17-4b60-408a-be28-f9e8eb1d2465" (UID: "e5a13d17-4b60-408a-be28-f9e8eb1d2465"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.360003 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.360847 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5a13d17-4b60-408a-be28-f9e8eb1d2465" (UID: "e5a13d17-4b60-408a-be28-f9e8eb1d2465"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414183 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414209 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5a13d17-4b60-408a-be28-f9e8eb1d2465-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414220 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc280ae3-abda-44e0-8a70-01165eb826fd-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414228 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414236 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5a13d17-4b60-408a-be28-f9e8eb1d2465-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414244 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414252 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqxfb\" (UniqueName: \"kubernetes.io/projected/fc280ae3-abda-44e0-8a70-01165eb826fd-kube-api-access-hqxfb\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414262 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mdrn\" (UniqueName: \"kubernetes.io/projected/e5a13d17-4b60-408a-be28-f9e8eb1d2465-kube-api-access-4mdrn\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.414269 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.415284 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.422991 5081 generic.go:334] "Generic (PLEG): container finished" podID="5e54a24e-4043-40a3-8715-bb461d3f1bde" containerID="39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706" exitCode=2 Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.423104 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e54a24e-4043-40a3-8715-bb461d3f1bde","Type":"ContainerDied","Data":"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.423141 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e54a24e-4043-40a3-8715-bb461d3f1bde","Type":"ContainerDied","Data":"09637ff84fb5c81e7dafeae53f186da31375de17584dca598589319d3f76d2be"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.423167 5081 scope.go:117] "RemoveContainer" containerID="39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.423347 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.435690 5081 generic.go:334] "Generic (PLEG): container finished" podID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerID="f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d" exitCode=0 Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.435745 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerDied","Data":"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.435772 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e5a13d17-4b60-408a-be28-f9e8eb1d2465","Type":"ContainerDied","Data":"eac9124ace114c38dfd8b03d753df510607e828f4eda79f015a82ea8677cad2e"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.435831 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.439913 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data" (OuterVolumeSpecName: "config-data") pod "fc280ae3-abda-44e0-8a70-01165eb826fd" (UID: "fc280ae3-abda-44e0-8a70-01165eb826fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.447638 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerID="80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737" exitCode=0 Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.448337 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.449085 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerDied","Data":"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.449146 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc280ae3-abda-44e0-8a70-01165eb826fd","Type":"ContainerDied","Data":"ac01462b1ca622ec2946a71a2de8be24d616f76afa7a6eb285398b4e85510c73"} Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.475599 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.496168 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.508923 5081 scope.go:117] "RemoveContainer" containerID="39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.509436 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706\": container with ID starting with 39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706 not found: ID does not exist" containerID="39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.509471 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706"} err="failed to get container status \"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706\": rpc error: code = NotFound desc = could not find container \"39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706\": container with ID starting with 39b3e5bfd390e5abf7e24094c4e5174eaf32e2f2f8d1ef6ac1c15490fa89a706 not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.509499 5081 scope.go:117] "RemoveContainer" containerID="f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.522698 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523809 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="sg-core" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.523839 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="sg-core" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523862 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-central-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.523871 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-central-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523884 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="proxy-httpd" Oct 03 15:51:57 crc 
kubenswrapper[5081]: I1003 15:51:57.523893 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="proxy-httpd" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523907 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-api" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.523914 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-api" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523944 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-notification-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.523956 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-notification-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.523980 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e54a24e-4043-40a3-8715-bb461d3f1bde" containerName="kube-state-metrics" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.523989 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e54a24e-4043-40a3-8715-bb461d3f1bde" containerName="kube-state-metrics" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.524001 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-log" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.524010 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-log" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.524232 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-log" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.524261 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e54a24e-4043-40a3-8715-bb461d3f1bde" containerName="kube-state-metrics" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.524277 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-notification-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.526050 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.526088 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc280ae3-abda-44e0-8a70-01165eb826fd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.527119 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" containerName="nova-api-api" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.527191 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="ceilometer-central-agent" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.527211 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="proxy-httpd" Oct 03 
15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.527242 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" containerName="sg-core" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.531205 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.535727 5081 scope.go:117] "RemoveContainer" containerID="418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.536457 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-nlgst" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.536740 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.536859 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.546657 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.553680 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.565814 5081 scope.go:117] "RemoveContainer" containerID="f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.566234 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d\": container with ID starting with f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d not found: ID does not exist" containerID="f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.566262 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d"} err="failed to get container status \"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d\": rpc error: code = NotFound desc = could not find container \"f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d\": container with ID starting with f14099ec06cf0852dbe5aaf6d36d9d5cbc750e77fe5cea09a45bd242f7fd305d not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.566287 5081 scope.go:117] "RemoveContainer" containerID="418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.566889 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076\": container with ID starting with 418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076 not found: ID does not exist" containerID="418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.566911 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076"} err="failed to get container 
status \"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076\": rpc error: code = NotFound desc = could not find container \"418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076\": container with ID starting with 418e5a639a516b346ca56a7a51daffb5af0cc435a597b69703b7ac4d80796076 not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.566924 5081 scope.go:117] "RemoveContainer" containerID="717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.568717 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.592998 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.596285 5081 scope.go:117] "RemoveContainer" containerID="23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.604421 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.611879 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.617108 5081 scope.go:117] "RemoveContainer" containerID="80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.621427 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.624244 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627212 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627289 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627373 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kdnv\" (UniqueName: \"kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627421 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627492 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " 
pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.627615 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.632372 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.633943 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.636121 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.636291 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.636319 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.646477 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.659371 5081 scope.go:117] "RemoveContainer" containerID="3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.659834 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.728946 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.729252 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.729412 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.729503 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730211 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730264 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730295 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730458 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730492 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4qvr\" (UniqueName: \"kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730524 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730614 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730656 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730764 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730792 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.730844 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" 
Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.731152 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7xsn\" (UniqueName: \"kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.731219 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kdnv\" (UniqueName: \"kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.736660 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.737150 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.740305 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.751907 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kdnv\" (UniqueName: \"kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv\") pod \"kube-state-metrics-0\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.831105 5081 scope.go:117] "RemoveContainer" containerID="717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.831534 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357\": container with ID starting with 717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357 not found: ID does not exist" containerID="717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.831586 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357"} err="failed to get container status \"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357\": rpc error: code = NotFound desc = could not find container \"717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357\": container with ID starting with 717360d933d629e993d81fdca00190d62c6c1c4b1c96e2d549c4ea1021e51357 not found: ID does not exist" Oct 03 
15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.831608 5081 scope.go:117] "RemoveContainer" containerID="23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.832007 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6\": container with ID starting with 23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6 not found: ID does not exist" containerID="23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832065 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6"} err="failed to get container status \"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6\": rpc error: code = NotFound desc = could not find container \"23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6\": container with ID starting with 23cb297dcce83837d3ab57a0f929685773b31b3e4e85e148fb60e7931e2e4ac6 not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832101 5081 scope.go:117] "RemoveContainer" containerID="80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.832536 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737\": container with ID starting with 80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737 not found: ID does not exist" containerID="80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832572 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832600 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832627 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832650 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832665 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd\") pod \"ceilometer-0\" (UID: 
\"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832743 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832764 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4qvr\" (UniqueName: \"kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832786 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832807 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832850 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832869 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832904 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832917 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832935 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7xsn\" (UniqueName: \"kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.833033 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") 
" pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.832591 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737"} err="failed to get container status \"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737\": rpc error: code = NotFound desc = could not find container \"80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737\": container with ID starting with 80ef5b40f810451c43690e45d92c4932fe8785bebc5ebe56b051968a9bd84737 not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.833169 5081 scope.go:117] "RemoveContainer" containerID="3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8" Oct 03 15:51:57 crc kubenswrapper[5081]: E1003 15:51:57.835208 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8\": container with ID starting with 3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8 not found: ID does not exist" containerID="3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.835259 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8"} err="failed to get container status \"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8\": rpc error: code = NotFound desc = could not find container \"3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8\": container with ID starting with 3d29df552629d3b8470dd84366dc2ca4ac694fb7b94279d9edc744eb520795a8 not found: ID does not exist" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.835714 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.837364 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.837493 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.839508 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.839528 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " 
pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.840070 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.842102 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e54a24e-4043-40a3-8715-bb461d3f1bde" path="/var/lib/kubelet/pods/5e54a24e-4043-40a3-8715-bb461d3f1bde/volumes" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.842220 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.842784 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5a13d17-4b60-408a-be28-f9e8eb1d2465" path="/var/lib/kubelet/pods/e5a13d17-4b60-408a-be28-f9e8eb1d2465/volumes" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.844252 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc280ae3-abda-44e0-8a70-01165eb826fd" path="/var/lib/kubelet/pods/fc280ae3-abda-44e0-8a70-01165eb826fd/volumes" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.847313 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.848112 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.849734 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4qvr\" (UniqueName: \"kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr\") pod \"nova-api-0\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " pod="openstack/nova-api-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.851488 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7xsn\" (UniqueName: \"kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn\") pod \"ceilometer-0\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " pod="openstack/ceilometer-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.864907 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.925054 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:51:57 crc kubenswrapper[5081]: I1003 15:51:57.943330 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.123522 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.349883 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.458190 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b12ba3f3-51d4-4c3d-9677-d0a632be0974","Type":"ContainerStarted","Data":"bc3c2b9a3a6daa1cd43fdcfd4af1c20e96c47e13c21bc29f6870194bd99c6762"} Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.496809 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:58 crc kubenswrapper[5081]: W1003 15:51:58.498853 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7f3546b_0b3e_4ad9_b9e2_b4b9c328c7dd.slice/crio-e73e2cd06d815db1771d202ca3c3a10765e4d5db49f2a969a5ef82006ed9c475 WatchSource:0}: Error finding container e73e2cd06d815db1771d202ca3c3a10765e4d5db49f2a969a5ef82006ed9c475: Status 404 returned error can't find the container with id e73e2cd06d815db1771d202ca3c3a10765e4d5db49f2a969a5ef82006ed9c475 Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.596199 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:51:58 crc kubenswrapper[5081]: W1003 15:51:58.597408 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b3c210b_ba38_4b85_9295_8ace6e76d38a.slice/crio-82a9c27999a4752d958fb2fffd75687ae0c62cd38a17097dd0fdb07063ea7306 WatchSource:0}: Error finding container 82a9c27999a4752d958fb2fffd75687ae0c62cd38a17097dd0fdb07063ea7306: Status 404 returned error can't find the container with id 82a9c27999a4752d958fb2fffd75687ae0c62cd38a17097dd0fdb07063ea7306 Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.735126 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:51:58 crc kubenswrapper[5081]: I1003 15:51:58.990075 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.018531 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.474117 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerStarted","Data":"f406b6bac8c9eaf564ad577e983184c4f00408afd07e45e9c2fd066d200e4b86"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.474435 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerStarted","Data":"3ce687b2462708350b2a1178d7176d56169cfe586d16c0fdab2b578dca1120db"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.474447 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerStarted","Data":"82a9c27999a4752d958fb2fffd75687ae0c62cd38a17097dd0fdb07063ea7306"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.476046 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"b12ba3f3-51d4-4c3d-9677-d0a632be0974","Type":"ContainerStarted","Data":"375aa21783d8942e6463ead5ec2f108cd31f251a593b6147df3ae5e0bcca62a7"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.476184 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.477384 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerStarted","Data":"8b431c0a423fbaf2a3bbd90b24a17f276c86388352c80a35cd6787533c9aaff5"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.477405 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerStarted","Data":"e73e2cd06d815db1771d202ca3c3a10765e4d5db49f2a969a5ef82006ed9c475"} Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.477691 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-znjbw" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="registry-server" containerID="cri-o://9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0" gracePeriod=2 Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.500647 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.505502 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.505474399 podStartE2EDuration="2.505474399s" podCreationTimestamp="2025-10-03 15:51:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:51:59.502473083 +0000 UTC m=+1438.468029706" watchObservedRunningTime="2025-10-03 15:51:59.505474399 +0000 UTC m=+1438.471031022" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.559278 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.141079015 podStartE2EDuration="2.559251624s" podCreationTimestamp="2025-10-03 15:51:57 +0000 UTC" firstStartedPulling="2025-10-03 15:51:58.356154767 +0000 UTC m=+1437.321711380" lastFinishedPulling="2025-10-03 15:51:58.774327366 +0000 UTC m=+1437.739883989" observedRunningTime="2025-10-03 15:51:59.551280345 +0000 UTC m=+1438.516836958" watchObservedRunningTime="2025-10-03 15:51:59.559251624 +0000 UTC m=+1438.524808247" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.661759 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-pzvf2"] Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.664008 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.667077 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.667398 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.673000 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pzvf2"] Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.780585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6jdv\" (UniqueName: \"kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.781843 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.781893 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.782044 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.885023 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.885448 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.885572 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.885719 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6jdv\" (UniqueName: 
\"kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.896711 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.896862 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.901549 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:51:59 crc kubenswrapper[5081]: I1003 15:51:59.905691 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6jdv\" (UniqueName: \"kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv\") pod \"nova-cell1-cell-mapping-pzvf2\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.001549 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.090884 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities\") pod \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.090959 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content\") pod \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.091002 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92rnk\" (UniqueName: \"kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk\") pod \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\" (UID: \"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa\") " Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.091947 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities" (OuterVolumeSpecName: "utilities") pod "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" (UID: "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.094889 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk" (OuterVolumeSpecName: "kube-api-access-92rnk") pod "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" (UID: "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa"). InnerVolumeSpecName "kube-api-access-92rnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.152751 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" (UID: "fd3d31bb-8624-443c-81cf-a0dc57ce9aaa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.153446 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.193354 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.193474 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92rnk\" (UniqueName: \"kubernetes.io/projected/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-kube-api-access-92rnk\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.193488 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.492933 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerStarted","Data":"67e0fb702d4234a1b0d0a102cd751cf6468ba64ac6753ed7cb0d8b2eac9a6620"} Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.495812 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerID="9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0" exitCode=0 Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.496609 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerDied","Data":"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0"} Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.496729 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-znjbw" event={"ID":"fd3d31bb-8624-443c-81cf-a0dc57ce9aaa","Type":"ContainerDied","Data":"61ad3c79212bdb2bdb8f4fae967af0edf0046f0379322cadb3339914d14992b1"} Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.496751 5081 scope.go:117] "RemoveContainer" containerID="9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.496770 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-znjbw" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.523889 5081 scope.go:117] "RemoveContainer" containerID="bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.546741 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.558189 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-znjbw"] Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.568489 5081 scope.go:117] "RemoveContainer" containerID="8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.609871 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pzvf2"] Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.615028 5081 scope.go:117] "RemoveContainer" containerID="9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0" Oct 03 15:52:00 crc kubenswrapper[5081]: E1003 15:52:00.615622 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0\": container with ID starting with 9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0 not found: ID does not exist" containerID="9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.615649 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0"} err="failed to get container status \"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0\": rpc error: code = NotFound desc = could not find container \"9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0\": container with ID starting with 9cb942fa32b35d8901c71644392e3d2f838556496d3575272d867d9fe49a05e0 not found: ID does not exist" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.615670 5081 scope.go:117] "RemoveContainer" containerID="bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899" Oct 03 15:52:00 crc kubenswrapper[5081]: E1003 15:52:00.616064 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899\": container with ID starting with bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899 not found: ID does not exist" containerID="bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.616093 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899"} err="failed to get container status \"bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899\": rpc error: code = NotFound desc = could not find container \"bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899\": container with ID starting with bc47fa982d59bd8ae83981255f31d969c0cb4b42f3db19e1ddf6ee27836c3899 not found: ID does not exist" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.616109 5081 scope.go:117] "RemoveContainer" 
containerID="8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061" Oct 03 15:52:00 crc kubenswrapper[5081]: E1003 15:52:00.616624 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061\": container with ID starting with 8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061 not found: ID does not exist" containerID="8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.616653 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061"} err="failed to get container status \"8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061\": rpc error: code = NotFound desc = could not find container \"8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061\": container with ID starting with 8c5a32688c86196d85e0dc24c97b9aabb038e53c2ae41c91a1001889e79ce061 not found: ID does not exist" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.905718 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.966023 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:52:00 crc kubenswrapper[5081]: I1003 15:52:00.967157 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="dnsmasq-dns" containerID="cri-o://00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed" gracePeriod=10 Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.494369 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.506408 5081 generic.go:334] "Generic (PLEG): container finished" podID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerID="00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed" exitCode=0 Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.506474 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" event={"ID":"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7","Type":"ContainerDied","Data":"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed"} Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.506507 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" event={"ID":"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7","Type":"ContainerDied","Data":"ad1da3570a5f822c14ef8d114b524bd5a183f23fa3c7c3a20c089c4dd6128e4e"} Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.506528 5081 scope.go:117] "RemoveContainer" containerID="00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.506684 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66d94ddf6f-mws9m" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.509771 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pzvf2" event={"ID":"f3c69102-25cd-43c8-ab93-3c0137f6a666","Type":"ContainerStarted","Data":"16c0bbdb9a05648233faf864130036eaa9a7791b3b9c884c02732899ba53da61"} Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.509808 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pzvf2" event={"ID":"f3c69102-25cd-43c8-ab93-3c0137f6a666","Type":"ContainerStarted","Data":"2c204c4b31e06e3029420d96e9c7fead4a250ba9c0eaf65c2398c76f4a76139a"} Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.529221 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.529628 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.529764 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.529877 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.529967 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhpbj\" (UniqueName: \"kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.530148 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc\") pod \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\" (UID: \"723e589e-9d8f-46cc-92e0-ff8cc9b34ab7\") " Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.538551 5081 scope.go:117] "RemoveContainer" containerID="9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.562971 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-pzvf2" podStartSLOduration=2.562951713 podStartE2EDuration="2.562951713s" podCreationTimestamp="2025-10-03 15:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:52:01.560774691 +0000 UTC m=+1440.526331314" watchObservedRunningTime="2025-10-03 15:52:01.562951713 +0000 UTC 
m=+1440.528508326" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.564748 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerStarted","Data":"bed36466fe1268345d6f627bd239d7ce9f7778475f6eebfd5d7fc91f0dd52ac7"} Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.570161 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj" (OuterVolumeSpecName: "kube-api-access-fhpbj") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "kube-api-access-fhpbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.614032 5081 scope.go:117] "RemoveContainer" containerID="00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed" Oct 03 15:52:01 crc kubenswrapper[5081]: E1003 15:52:01.616040 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed\": container with ID starting with 00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed not found: ID does not exist" containerID="00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.616073 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed"} err="failed to get container status \"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed\": rpc error: code = NotFound desc = could not find container \"00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed\": container with ID starting with 00b5f2b4bcf4ade5ca6bd509fffb43f76d4ac7d38c0468fd6a5c675d3d4de9ed not found: ID does not exist" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.616101 5081 scope.go:117] "RemoveContainer" containerID="9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0" Oct 03 15:52:01 crc kubenswrapper[5081]: E1003 15:52:01.616524 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0\": container with ID starting with 9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0 not found: ID does not exist" containerID="9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.616547 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0"} err="failed to get container status \"9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0\": rpc error: code = NotFound desc = could not find container \"9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0\": container with ID starting with 9d0fb2c551985f7d6073d7dbca4c86f28bda658a170e9ce79d575b68335cf0b0 not found: ID does not exist" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.632737 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhpbj\" (UniqueName: \"kubernetes.io/projected/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-kube-api-access-fhpbj\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 
crc kubenswrapper[5081]: I1003 15:52:01.642443 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.652143 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.662544 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config" (OuterVolumeSpecName: "config") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.670297 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.691579 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" (UID: "723e589e-9d8f-46cc-92e0-ff8cc9b34ab7"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.734941 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.734979 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.734992 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.735004 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.735013 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.861086 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" path="/var/lib/kubelet/pods/fd3d31bb-8624-443c-81cf-a0dc57ce9aaa/volumes" Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.862442 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:52:01 crc kubenswrapper[5081]: I1003 15:52:01.862495 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66d94ddf6f-mws9m"] Oct 03 15:52:03 crc kubenswrapper[5081]: I1003 15:52:03.860759 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" path="/var/lib/kubelet/pods/723e589e-9d8f-46cc-92e0-ff8cc9b34ab7/volumes" Oct 03 15:52:04 crc kubenswrapper[5081]: I1003 15:52:04.601632 5081 generic.go:334] "Generic (PLEG): container finished" podID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerID="89981972b868abd13c79f66a72e4f87fdc5817fa2838d8777f081581ead1b3d7" exitCode=1 Oct 03 15:52:04 crc kubenswrapper[5081]: I1003 15:52:04.601680 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerDied","Data":"89981972b868abd13c79f66a72e4f87fdc5817fa2838d8777f081581ead1b3d7"} Oct 03 15:52:04 crc kubenswrapper[5081]: I1003 15:52:04.601843 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-central-agent" containerID="cri-o://8b431c0a423fbaf2a3bbd90b24a17f276c86388352c80a35cd6787533c9aaff5" gracePeriod=30 Oct 03 15:52:04 crc kubenswrapper[5081]: I1003 15:52:04.601903 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-notification-agent" containerID="cri-o://67e0fb702d4234a1b0d0a102cd751cf6468ba64ac6753ed7cb0d8b2eac9a6620" gracePeriod=30 Oct 03 15:52:04 crc kubenswrapper[5081]: I1003 15:52:04.601888 5081 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="sg-core" containerID="cri-o://bed36466fe1268345d6f627bd239d7ce9f7778475f6eebfd5d7fc91f0dd52ac7" gracePeriod=30 Oct 03 15:52:05 crc kubenswrapper[5081]: I1003 15:52:05.612075 5081 generic.go:334] "Generic (PLEG): container finished" podID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerID="bed36466fe1268345d6f627bd239d7ce9f7778475f6eebfd5d7fc91f0dd52ac7" exitCode=2 Oct 03 15:52:05 crc kubenswrapper[5081]: I1003 15:52:05.612365 5081 generic.go:334] "Generic (PLEG): container finished" podID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerID="67e0fb702d4234a1b0d0a102cd751cf6468ba64ac6753ed7cb0d8b2eac9a6620" exitCode=0 Oct 03 15:52:05 crc kubenswrapper[5081]: I1003 15:52:05.612151 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerDied","Data":"bed36466fe1268345d6f627bd239d7ce9f7778475f6eebfd5d7fc91f0dd52ac7"} Oct 03 15:52:05 crc kubenswrapper[5081]: I1003 15:52:05.612403 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerDied","Data":"67e0fb702d4234a1b0d0a102cd751cf6468ba64ac6753ed7cb0d8b2eac9a6620"} Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.625001 5081 generic.go:334] "Generic (PLEG): container finished" podID="f3c69102-25cd-43c8-ab93-3c0137f6a666" containerID="16c0bbdb9a05648233faf864130036eaa9a7791b3b9c884c02732899ba53da61" exitCode=0 Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.625073 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pzvf2" event={"ID":"f3c69102-25cd-43c8-ab93-3c0137f6a666","Type":"ContainerDied","Data":"16c0bbdb9a05648233faf864130036eaa9a7791b3b9c884c02732899ba53da61"} Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.628609 5081 generic.go:334] "Generic (PLEG): container finished" podID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerID="8b431c0a423fbaf2a3bbd90b24a17f276c86388352c80a35cd6787533c9aaff5" exitCode=0 Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.628642 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerDied","Data":"8b431c0a423fbaf2a3bbd90b24a17f276c86388352c80a35cd6787533c9aaff5"} Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.821299 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938031 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938166 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938228 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938253 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938420 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938485 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7xsn\" (UniqueName: \"kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938518 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd\") pod \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\" (UID: \"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd\") " Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938772 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.938916 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "run-httpd". 
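The "Killing container with a grace period ... gracePeriod=30" records for ceilometer-0 follow the standard SIGTERM-then-SIGKILL shutdown sequence. A self-contained sketch of that pattern (assumes a Unix host; this is the general technique, not CRI-O's actual code):

```go
// Illustrative sketch of SIGTERM-then-SIGKILL termination with a grace
// period, the pattern behind "Killing container with a grace period".
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	// Ask the process to exit cleanly first. An exit code of 143, as seen
	// for nova-api-log further below, is 128+15: death by this SIGTERM.
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		_ = cmd.Process.Kill() // grace expired: force SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println("process stopped:", stopWithGrace(cmd, 30*time.Second))
}
```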
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.939202 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.939225 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.944640 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts" (OuterVolumeSpecName: "scripts") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.945150 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn" (OuterVolumeSpecName: "kube-api-access-l7xsn") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "kube-api-access-l7xsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:06 crc kubenswrapper[5081]: I1003 15:52:06.974942 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.015369 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.040955 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.040995 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.041009 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7xsn\" (UniqueName: \"kubernetes.io/projected/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-kube-api-access-l7xsn\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.041026 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.055385 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data" (OuterVolumeSpecName: "config-data") pod "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" (UID: "b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.146114 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.641327 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd","Type":"ContainerDied","Data":"e73e2cd06d815db1771d202ca3c3a10765e4d5db49f2a969a5ef82006ed9c475"} Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.641381 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.641402 5081 scope.go:117] "RemoveContainer" containerID="89981972b868abd13c79f66a72e4f87fdc5817fa2838d8777f081581ead1b3d7" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.692949 5081 scope.go:117] "RemoveContainer" containerID="bed36466fe1268345d6f627bd239d7ce9f7778475f6eebfd5d7fc91f0dd52ac7" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.696695 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.708469 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.720670 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721552 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="extract-utilities" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721593 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="extract-utilities" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721623 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="registry-server" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721632 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="registry-server" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721653 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-notification-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721661 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-notification-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721680 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="sg-core" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721688 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="sg-core" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721706 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-central-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721716 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-central-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721728 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="proxy-httpd" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721736 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="proxy-httpd" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721751 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="init" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721761 5081 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="init" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721788 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="extract-content" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721795 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="extract-content" Oct 03 15:52:07 crc kubenswrapper[5081]: E1003 15:52:07.721810 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="dnsmasq-dns" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.721818 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="dnsmasq-dns" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722064 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-central-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722089 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="sg-core" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722103 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="ceilometer-notification-agent" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722116 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="723e589e-9d8f-46cc-92e0-ff8cc9b34ab7" containerName="dnsmasq-dns" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722126 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" containerName="proxy-httpd" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.722138 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd3d31bb-8624-443c-81cf-a0dc57ce9aaa" containerName="registry-server" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.724317 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.727164 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.728517 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.728707 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.729262 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.730786 5081 scope.go:117] "RemoveContainer" containerID="67e0fb702d4234a1b0d0a102cd751cf6468ba64ac6753ed7cb0d8b2eac9a6620" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.760869 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.760956 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.761078 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.761101 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.761124 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.761223 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7wx4\" (UniqueName: \"kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.761411 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: 
I1003 15:52:07.761448 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.763043 5081 scope.go:117] "RemoveContainer" containerID="8b431c0a423fbaf2a3bbd90b24a17f276c86388352c80a35cd6787533c9aaff5" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.855020 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd" path="/var/lib/kubelet/pods/b7f3546b-0b3e-4ad9-b9e2-b4b9c328c7dd/volumes" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864537 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864653 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864784 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864855 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864921 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864942 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.864965 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.865001 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7wx4\" (UniqueName: \"kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4\") pod \"ceilometer-0\" (UID: 
\"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.867150 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.868112 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.871146 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.872434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.876230 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.877019 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.880914 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.884187 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7wx4\" (UniqueName: \"kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4\") pod \"ceilometer-0\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " pod="openstack/ceilometer-0" Oct 03 15:52:07 crc kubenswrapper[5081]: I1003 15:52:07.896892 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.048338 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.059248 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.124457 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.124512 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.172735 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data\") pod \"f3c69102-25cd-43c8-ab93-3c0137f6a666\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.173129 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6jdv\" (UniqueName: \"kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv\") pod \"f3c69102-25cd-43c8-ab93-3c0137f6a666\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.173161 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts\") pod \"f3c69102-25cd-43c8-ab93-3c0137f6a666\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.173244 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle\") pod \"f3c69102-25cd-43c8-ab93-3c0137f6a666\" (UID: \"f3c69102-25cd-43c8-ab93-3c0137f6a666\") " Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.179783 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv" (OuterVolumeSpecName: "kube-api-access-z6jdv") pod "f3c69102-25cd-43c8-ab93-3c0137f6a666" (UID: "f3c69102-25cd-43c8-ab93-3c0137f6a666"). InnerVolumeSpecName "kube-api-access-z6jdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.186257 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts" (OuterVolumeSpecName: "scripts") pod "f3c69102-25cd-43c8-ab93-3c0137f6a666" (UID: "f3c69102-25cd-43c8-ab93-3c0137f6a666"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.207234 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3c69102-25cd-43c8-ab93-3c0137f6a666" (UID: "f3c69102-25cd-43c8-ab93-3c0137f6a666"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.207834 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data" (OuterVolumeSpecName: "config-data") pod "f3c69102-25cd-43c8-ab93-3c0137f6a666" (UID: "f3c69102-25cd-43c8-ab93-3c0137f6a666"). InnerVolumeSpecName "config-data". 
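The startup-probe "unhealthy" transitions for nova-api-0 above come from an HTTPS GET against the pod IP; the "EOF" probe output a few records below is the connection being dropped while the API shuts down. A rough sketch of such a probe (the URL and timeout are stand-ins from the log; kubelet's prober differs in detail):

```go
// Illustrative sketch of an HTTP startup probe like the one failing for
// nova-api-0 ("Get https://10.217.0.199:8774/: EOF"). Not kubelet's prober.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{
		Timeout: 5 * time.Second,
		// HTTPS probes typically skip verification of self-signed service certs.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. EOF while the API is restarting, as in the log
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("https://10.217.0.199:8774/"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("Probe succeeded")
	}
}
```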
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.277010 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.277048 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6jdv\" (UniqueName: \"kubernetes.io/projected/f3c69102-25cd-43c8-ab93-3c0137f6a666-kube-api-access-z6jdv\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.277058 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.277066 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c69102-25cd-43c8-ab93-3c0137f6a666-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.523363 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:52:08 crc kubenswrapper[5081]: W1003 15:52:08.523633 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c38ca50_e27f_42f4_b828_12ca75618d53.slice/crio-5cea3734be751a28ab4a74fa7c96c0f8ddd8c8f6782a315b997aea60d80e315f WatchSource:0}: Error finding container 5cea3734be751a28ab4a74fa7c96c0f8ddd8c8f6782a315b997aea60d80e315f: Status 404 returned error can't find the container with id 5cea3734be751a28ab4a74fa7c96c0f8ddd8c8f6782a315b997aea60d80e315f Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.665170 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pzvf2" event={"ID":"f3c69102-25cd-43c8-ab93-3c0137f6a666","Type":"ContainerDied","Data":"2c204c4b31e06e3029420d96e9c7fead4a250ba9c0eaf65c2398c76f4a76139a"} Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.665236 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c204c4b31e06e3029420d96e9c7fead4a250ba9c0eaf65c2398c76f4a76139a" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.665191 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pzvf2" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.678794 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerStarted","Data":"5cea3734be751a28ab4a74fa7c96c0f8ddd8c8f6782a315b997aea60d80e315f"} Oct 03 15:52:08 crc kubenswrapper[5081]: E1003 15:52:08.778288 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3c69102_25cd_43c8_ab93_3c0137f6a666.slice/crio-2c204c4b31e06e3029420d96e9c7fead4a250ba9c0eaf65c2398c76f4a76139a\": RecentStats: unable to find data in memory cache]" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.835407 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.835768 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-log" containerID="cri-o://3ce687b2462708350b2a1178d7176d56169cfe586d16c0fdab2b578dca1120db" gracePeriod=30 Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.835832 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-api" containerID="cri-o://f406b6bac8c9eaf564ad577e983184c4f00408afd07e45e9c2fd066d200e4b86" gracePeriod=30 Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.842463 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": EOF" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.842675 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": EOF" Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.852592 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.852826 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" containerName="nova-scheduler-scheduler" containerID="cri-o://6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" gracePeriod=30 Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.868899 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.869176 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" containerID="cri-o://5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40" gracePeriod=30 Oct 03 15:52:08 crc kubenswrapper[5081]: I1003 15:52:08.869753 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" containerID="cri-o://b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c" 
gracePeriod=30 Oct 03 15:52:09 crc kubenswrapper[5081]: I1003 15:52:09.696252 5081 generic.go:334] "Generic (PLEG): container finished" podID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerID="3ce687b2462708350b2a1178d7176d56169cfe586d16c0fdab2b578dca1120db" exitCode=143 Oct 03 15:52:09 crc kubenswrapper[5081]: I1003 15:52:09.696332 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerDied","Data":"3ce687b2462708350b2a1178d7176d56169cfe586d16c0fdab2b578dca1120db"} Oct 03 15:52:09 crc kubenswrapper[5081]: I1003 15:52:09.697862 5081 generic.go:334] "Generic (PLEG): container finished" podID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerID="5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40" exitCode=143 Oct 03 15:52:09 crc kubenswrapper[5081]: I1003 15:52:09.697905 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerDied","Data":"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40"} Oct 03 15:52:10 crc kubenswrapper[5081]: E1003 15:52:10.695993 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c is running failed: container process not found" containerID="6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:52:10 crc kubenswrapper[5081]: E1003 15:52:10.696668 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c is running failed: container process not found" containerID="6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:52:10 crc kubenswrapper[5081]: E1003 15:52:10.696958 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c is running failed: container process not found" containerID="6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:52:10 crc kubenswrapper[5081]: E1003 15:52:10.697069 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" containerName="nova-scheduler-scheduler" Oct 03 15:52:10 crc kubenswrapper[5081]: I1003 15:52:10.707246 5081 generic.go:334] "Generic (PLEG): container finished" podID="82268010-ad26-4d43-9789-ca15b8e3e394" containerID="6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" exitCode=0 Oct 03 15:52:10 crc kubenswrapper[5081]: I1003 15:52:10.707322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"82268010-ad26-4d43-9789-ca15b8e3e394","Type":"ContainerDied","Data":"6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c"} Oct 03 15:52:10 crc kubenswrapper[5081]: 
I1003 15:52:10.709513 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerStarted","Data":"e89a72236f59fd3fd90bec6d4e3cb013c31ae0d3b3cd93b35b04de7bbc1b544a"} Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.238105 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.333816 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfrdf\" (UniqueName: \"kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf\") pod \"82268010-ad26-4d43-9789-ca15b8e3e394\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.333917 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle\") pod \"82268010-ad26-4d43-9789-ca15b8e3e394\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.334087 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data\") pod \"82268010-ad26-4d43-9789-ca15b8e3e394\" (UID: \"82268010-ad26-4d43-9789-ca15b8e3e394\") " Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.354269 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf" (OuterVolumeSpecName: "kube-api-access-jfrdf") pod "82268010-ad26-4d43-9789-ca15b8e3e394" (UID: "82268010-ad26-4d43-9789-ca15b8e3e394"). InnerVolumeSpecName "kube-api-access-jfrdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.375446 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82268010-ad26-4d43-9789-ca15b8e3e394" (UID: "82268010-ad26-4d43-9789-ca15b8e3e394"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.384644 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data" (OuterVolumeSpecName: "config-data") pod "82268010-ad26-4d43-9789-ca15b8e3e394" (UID: "82268010-ad26-4d43-9789-ca15b8e3e394"). InnerVolumeSpecName "config-data". 
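The exitCode values scattered through these records follow the usual Unix convention: 0 is a clean stop, small codes are application errors, and anything above 128 is 128 plus the fatal signal, so the 143s logged for nova-api-log and nova-metadata-log are SIGTERM (128+15) from the graceful shutdowns above. A tiny decoder:

```go
// Illustrative helper for reading the exitCode values in these records:
// 0 = clean exit, 1/2 = application error, 143 = 128+15, i.e. terminated
// by SIGTERM during a graceful shutdown. A sketch, not kubelet code.
package main

import "fmt"

func describeExit(code int) string {
	switch {
	case code == 0:
		return "exited cleanly"
	case code > 128:
		return fmt.Sprintf("killed by signal %d", code-128)
	default:
		return fmt.Sprintf("application error (status %d)", code)
	}
}

func main() {
	for _, code := range []int{0, 1, 2, 143} { // values seen in this log
		fmt.Printf("exitCode=%d: %s\n", code, describeExit(code))
	}
}
```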
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.436385 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.436515 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfrdf\" (UniqueName: \"kubernetes.io/projected/82268010-ad26-4d43-9789-ca15b8e3e394-kube-api-access-jfrdf\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.436606 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82268010-ad26-4d43-9789-ca15b8e3e394-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.720359 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"82268010-ad26-4d43-9789-ca15b8e3e394","Type":"ContainerDied","Data":"b3178aeb75e883a283ca3319eb1c7cc4f1fd6765d2c0e186b9fbb59741b7d6d3"} Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.720404 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.720424 5081 scope.go:117] "RemoveContainer" containerID="6df1d44a04b0f45a4612e4f410a4f6f1454768a04318dc5e8c96823a1283c93c" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.759160 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.774338 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.786719 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:11 crc kubenswrapper[5081]: E1003 15:52:11.787177 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" containerName="nova-scheduler-scheduler" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.787195 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" containerName="nova-scheduler-scheduler" Oct 03 15:52:11 crc kubenswrapper[5081]: E1003 15:52:11.787221 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c69102-25cd-43c8-ab93-3c0137f6a666" containerName="nova-manage" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.787232 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c69102-25cd-43c8-ab93-3c0137f6a666" containerName="nova-manage" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.787420 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c69102-25cd-43c8-ab93-3c0137f6a666" containerName="nova-manage" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.787443 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" containerName="nova-scheduler-scheduler" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.788116 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.790008 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.818038 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.839916 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82268010-ad26-4d43-9789-ca15b8e3e394" path="/var/lib/kubelet/pods/82268010-ad26-4d43-9789-ca15b8e3e394/volumes" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.846196 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.846262 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ws5w\" (UniqueName: \"kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.846351 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.948534 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.949396 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.949472 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ws5w\" (UniqueName: \"kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.954667 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.960094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data\") pod \"nova-scheduler-0\" (UID: 
\"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:11 crc kubenswrapper[5081]: I1003 15:52:11.973352 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ws5w\" (UniqueName: \"kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w\") pod \"nova-scheduler-0\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " pod="openstack/nova-scheduler-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.105497 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.508522 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564198 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs\") pod \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564268 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data\") pod \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564302 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs\") pod \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564331 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle\") pod \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564392 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s77b4\" (UniqueName: \"kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4\") pod \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\" (UID: \"6b857716-836c-4a65-87e8-43ddd0e8ff4c\") " Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.564913 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs" (OuterVolumeSpecName: "logs") pod "6b857716-836c-4a65-87e8-43ddd0e8ff4c" (UID: "6b857716-836c-4a65-87e8-43ddd0e8ff4c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.565338 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b857716-836c-4a65-87e8-43ddd0e8ff4c-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.583450 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4" (OuterVolumeSpecName: "kube-api-access-s77b4") pod "6b857716-836c-4a65-87e8-43ddd0e8ff4c" (UID: "6b857716-836c-4a65-87e8-43ddd0e8ff4c"). InnerVolumeSpecName "kube-api-access-s77b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.612707 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b857716-836c-4a65-87e8-43ddd0e8ff4c" (UID: "6b857716-836c-4a65-87e8-43ddd0e8ff4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.667183 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.667206 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s77b4\" (UniqueName: \"kubernetes.io/projected/6b857716-836c-4a65-87e8-43ddd0e8ff4c-kube-api-access-s77b4\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.667973 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6b857716-836c-4a65-87e8-43ddd0e8ff4c" (UID: "6b857716-836c-4a65-87e8-43ddd0e8ff4c"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.673787 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:52:12 crc kubenswrapper[5081]: W1003 15:52:12.678651 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7abf152b_a4ec_4114_bb59_491582952b05.slice/crio-2ce8e8a7fea6b491c4bd829ce12a91127cbdbfbe19f13602cc66fdab14e50c0b WatchSource:0}: Error finding container 2ce8e8a7fea6b491c4bd829ce12a91127cbdbfbe19f13602cc66fdab14e50c0b: Status 404 returned error can't find the container with id 2ce8e8a7fea6b491c4bd829ce12a91127cbdbfbe19f13602cc66fdab14e50c0b Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.685865 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data" (OuterVolumeSpecName: "config-data") pod "6b857716-836c-4a65-87e8-43ddd0e8ff4c" (UID: "6b857716-836c-4a65-87e8-43ddd0e8ff4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.735351 5081 generic.go:334] "Generic (PLEG): container finished" podID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerID="b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c" exitCode=0 Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.735802 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.735820 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerDied","Data":"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c"} Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.736456 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b857716-836c-4a65-87e8-43ddd0e8ff4c","Type":"ContainerDied","Data":"74d9beafe8806621f4279f2792aa5a565bfeb0e5c661683dc098d348ca1f440d"} Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.736484 5081 scope.go:117] "RemoveContainer" containerID="b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.742120 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerStarted","Data":"d458d29d42b270ecda96b4132690bafef24693a1bcafe119e2fb5b9bab9353b5"} Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.748232 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7abf152b-a4ec-4114-bb59-491582952b05","Type":"ContainerStarted","Data":"2ce8e8a7fea6b491c4bd829ce12a91127cbdbfbe19f13602cc66fdab14e50c0b"} Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.765953 5081 scope.go:117] "RemoveContainer" containerID="5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.773088 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.773112 5081 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b857716-836c-4a65-87e8-43ddd0e8ff4c-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.781579 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.790315 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.827815 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:12 crc kubenswrapper[5081]: E1003 15:52:12.828345 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.828369 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" Oct 03 15:52:12 crc kubenswrapper[5081]: E1003 15:52:12.828416 5081 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.828424 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.828735 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.828769 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.830105 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.831969 5081 scope.go:117] "RemoveContainer" containerID="b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.832235 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.833368 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.833612 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 15:52:12 crc kubenswrapper[5081]: E1003 15:52:12.835930 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c\": container with ID starting with b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c not found: ID does not exist" containerID="b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.835978 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c"} err="failed to get container status \"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c\": rpc error: code = NotFound desc = could not find container \"b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c\": container with ID starting with b9f62f89a9bae416ff8aa180d5dc1965a31945812830da157f004ccfdc1a120c not found: ID does not exist" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.836003 5081 scope.go:117] "RemoveContainer" containerID="5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40" Oct 03 15:52:12 crc kubenswrapper[5081]: E1003 15:52:12.836256 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40\": container with ID starting with 5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40 not found: ID does not exist" containerID="5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.836286 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40"} err="failed to get container status 
\"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40\": rpc error: code = NotFound desc = could not find container \"5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40\": container with ID starting with 5c8b2e0a6430e8ecf49cbcc366770b51b72cb2757e59d156aea2d4bab839fb40 not found: ID does not exist" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.875306 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.875348 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.875377 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp9ss\" (UniqueName: \"kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.875417 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.875433 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.977921 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.978309 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.978352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp9ss\" (UniqueName: \"kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.978406 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.978432 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.978987 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.983372 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.983372 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:12 crc kubenswrapper[5081]: I1003 15:52:12.991631 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.002915 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp9ss\" (UniqueName: \"kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss\") pod \"nova-metadata-0\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") " pod="openstack/nova-metadata-0" Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.154396 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.625130 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.760474 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerStarted","Data":"15cacaf50eb36833c76259d52ef81d26141a2d02ee727e2efd6a2a4ccbfc0c93"} Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.761702 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7abf152b-a4ec-4114-bb59-491582952b05","Type":"ContainerStarted","Data":"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1"} Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.763737 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerStarted","Data":"ff922eab8e63389a4e1886bd85210654c942dd59ae88953ced077b10dd8d3f38"} Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.777363 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.777342925 podStartE2EDuration="2.777342925s" podCreationTimestamp="2025-10-03 15:52:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:52:13.777190211 +0000 UTC m=+1452.742746844" watchObservedRunningTime="2025-10-03 15:52:13.777342925 +0000 UTC m=+1452.742899568" Oct 03 15:52:13 crc kubenswrapper[5081]: I1003 15:52:13.839344 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" path="/var/lib/kubelet/pods/6b857716-836c-4a65-87e8-43ddd0e8ff4c/volumes" Oct 03 15:52:14 crc kubenswrapper[5081]: I1003 15:52:14.796587 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerStarted","Data":"97c93f28e63efe77b049abc94e9494220f89aeb5a45b0d485d1782988daeb773"} Oct 03 15:52:14 crc kubenswrapper[5081]: I1003 15:52:14.797284 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerStarted","Data":"acb498f39e09a0e5daf720cd0a2e76f5299382935696927f7ec5b9dc290467a4"} Oct 03 15:52:14 crc kubenswrapper[5081]: I1003 15:52:14.800788 5081 generic.go:334] "Generic (PLEG): container finished" podID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerID="f406b6bac8c9eaf564ad577e983184c4f00408afd07e45e9c2fd066d200e4b86" exitCode=0 Oct 03 15:52:14 crc kubenswrapper[5081]: I1003 15:52:14.800823 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerDied","Data":"f406b6bac8c9eaf564ad577e983184c4f00408afd07e45e9c2fd066d200e4b86"} Oct 03 15:52:14 crc kubenswrapper[5081]: I1003 15:52:14.817655 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.817631104 podStartE2EDuration="2.817631104s" podCreationTimestamp="2025-10-03 15:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:52:14.816708777 +0000 UTC m=+1453.782265390" 
watchObservedRunningTime="2025-10-03 15:52:14.817631104 +0000 UTC m=+1453.783187717" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.026585 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.132539 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4qvr\" (UniqueName: \"kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133069 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133136 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133205 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133244 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133279 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs\") pod \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\" (UID: \"2b3c210b-ba38-4b85-9295-8ace6e76d38a\") " Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.133822 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs" (OuterVolumeSpecName: "logs") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.136145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr" (OuterVolumeSpecName: "kube-api-access-m4qvr") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "kube-api-access-m4qvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.166003 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.169069 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data" (OuterVolumeSpecName: "config-data") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.187451 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.198134 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2b3c210b-ba38-4b85-9295-8ace6e76d38a" (UID: "2b3c210b-ba38-4b85-9295-8ace6e76d38a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235468 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235520 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235534 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235545 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b3c210b-ba38-4b85-9295-8ace6e76d38a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235576 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b3c210b-ba38-4b85-9295-8ace6e76d38a-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.235588 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4qvr\" (UniqueName: \"kubernetes.io/projected/2b3c210b-ba38-4b85-9295-8ace6e76d38a-kube-api-access-m4qvr\") on node \"crc\" DevicePath \"\"" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.818741 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"2b3c210b-ba38-4b85-9295-8ace6e76d38a","Type":"ContainerDied","Data":"82a9c27999a4752d958fb2fffd75687ae0c62cd38a17097dd0fdb07063ea7306"} Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.818756 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.818797 5081 scope.go:117] "RemoveContainer" containerID="f406b6bac8c9eaf564ad577e983184c4f00408afd07e45e9c2fd066d200e4b86" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.823786 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerStarted","Data":"c0cc61bb578e6b82b21ec38b2933e461e6db44834efbf165d4d23566ac8055d0"} Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.849069 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.527359278 podStartE2EDuration="8.849051358s" podCreationTimestamp="2025-10-03 15:52:07 +0000 UTC" firstStartedPulling="2025-10-03 15:52:08.526046569 +0000 UTC m=+1447.491603182" lastFinishedPulling="2025-10-03 15:52:14.847738649 +0000 UTC m=+1453.813295262" observedRunningTime="2025-10-03 15:52:15.847655448 +0000 UTC m=+1454.813212071" watchObservedRunningTime="2025-10-03 15:52:15.849051358 +0000 UTC m=+1454.814607981" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.861240 5081 scope.go:117] "RemoveContainer" containerID="3ce687b2462708350b2a1178d7176d56169cfe586d16c0fdab2b578dca1120db" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.893619 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.909698 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.918322 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:15 crc kubenswrapper[5081]: E1003 15:52:15.918865 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-api" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.918885 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-api" Oct 03 15:52:15 crc kubenswrapper[5081]: E1003 15:52:15.918904 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-log" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.918910 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-log" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.919107 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-log" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.919134 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" containerName="nova-api-api" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.920133 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.922733 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.922957 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.923170 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 15:52:15 crc kubenswrapper[5081]: I1003 15:52:15.928793 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051178 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051225 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjhbl\" (UniqueName: \"kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051339 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051404 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.051483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153233 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153285 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153341 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153362 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153417 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjhbl\" (UniqueName: \"kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.153451 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.155217 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.168411 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.168489 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.168901 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.168985 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.171229 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjhbl\" (UniqueName: \"kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl\") pod \"nova-api-0\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " pod="openstack/nova-api-0" Oct 
03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.238659 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.706326 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:52:16 crc kubenswrapper[5081]: W1003 15:52:16.706742 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod074c619e_3cb7_417e_8192_9d13725cdde5.slice/crio-3f6f931d1334ffcab0850c9c051c18e104f77cb04c5b292b4915c95229ad8655 WatchSource:0}: Error finding container 3f6f931d1334ffcab0850c9c051c18e104f77cb04c5b292b4915c95229ad8655: Status 404 returned error can't find the container with id 3f6f931d1334ffcab0850c9c051c18e104f77cb04c5b292b4915c95229ad8655 Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.836462 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerStarted","Data":"3f6f931d1334ffcab0850c9c051c18e104f77cb04c5b292b4915c95229ad8655"} Oct 03 15:52:16 crc kubenswrapper[5081]: I1003 15:52:16.838907 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.109066 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.469932 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.470773 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6b857716-836c-4a65-87e8-43ddd0e8ff4c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.852974 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b3c210b-ba38-4b85-9295-8ace6e76d38a" path="/var/lib/kubelet/pods/2b3c210b-ba38-4b85-9295-8ace6e76d38a/volumes" Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.861145 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerStarted","Data":"a11a2c348b41af6175d5a702fc5ef23abb0b4539c6157c033c27932942b63f91"} Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.861196 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerStarted","Data":"a66b52b10331b1c92cc1455a575bb5bad525d4a39ad38db2e4104fb8919818e2"} Oct 03 15:52:17 crc kubenswrapper[5081]: I1003 15:52:17.895382 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.895366041 podStartE2EDuration="2.895366041s" podCreationTimestamp="2025-10-03 15:52:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 
15:52:17.891368406 +0000 UTC m=+1456.856925039" watchObservedRunningTime="2025-10-03 15:52:17.895366041 +0000 UTC m=+1456.860922654" Oct 03 15:52:18 crc kubenswrapper[5081]: I1003 15:52:18.154902 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:52:18 crc kubenswrapper[5081]: I1003 15:52:18.155001 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 15:52:22 crc kubenswrapper[5081]: I1003 15:52:22.107300 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 15:52:22 crc kubenswrapper[5081]: I1003 15:52:22.134141 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 15:52:22 crc kubenswrapper[5081]: I1003 15:52:22.930584 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 15:52:23 crc kubenswrapper[5081]: I1003 15:52:23.155394 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 15:52:23 crc kubenswrapper[5081]: I1003 15:52:23.155470 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 15:52:24 crc kubenswrapper[5081]: I1003 15:52:24.167779 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:24 crc kubenswrapper[5081]: I1003 15:52:24.167790 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:26 crc kubenswrapper[5081]: I1003 15:52:26.238803 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:52:26 crc kubenswrapper[5081]: I1003 15:52:26.239406 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 15:52:27 crc kubenswrapper[5081]: I1003 15:52:27.245869 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:27 crc kubenswrapper[5081]: I1003 15:52:27.252866 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:52:33 crc kubenswrapper[5081]: I1003 15:52:33.161423 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 15:52:33 crc kubenswrapper[5081]: I1003 15:52:33.163925 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 15:52:33 crc kubenswrapper[5081]: I1003 15:52:33.168913 5081 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 15:52:34 crc kubenswrapper[5081]: I1003 15:52:34.002167 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.246259 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.246732 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.247112 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.247428 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.253425 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 15:52:36 crc kubenswrapper[5081]: I1003 15:52:36.253900 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 15:52:38 crc kubenswrapper[5081]: I1003 15:52:38.060305 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.510873 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.511731 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" containerName="openstackclient" containerID="cri-o://c817d8193802204a47e564f00d89b7300147b2f6e4a65ec259ecd73cefe10317" gracePeriod=2 Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.539621 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.552190 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.553093 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="probe" containerID="cri-o://b6fd92f8baa9b25fd91643a7fa2bd52ae9a8dc5be12a4823bad3dc1941b76522" gracePeriod=30 Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.556953 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="cinder-scheduler" containerID="cri-o://7ad6062447176d359892df549a0f432d03509731f48fa17e1a53bd11f1b7042c" gracePeriod=30 Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.606181 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"] Oct 03 15:53:00 crc kubenswrapper[5081]: E1003 15:53:00.607446 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" containerName="openstackclient" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.607471 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" containerName="openstackclient" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.607851 5081 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" containerName="openstackclient" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.616851 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.643360 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.645330 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.660735 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.660796 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.684595 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngn54\" (UniqueName: \"kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.684816 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.684849 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.684922 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.684962 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 
15:53:00.703004 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.724420 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.787857 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.787922 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788006 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788040 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5rmz\" (UniqueName: \"kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788064 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788127 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788151 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788174 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: 
\"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788228 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngn54\" (UniqueName: \"kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.788279 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.790540 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.808652 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.808976 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api-log" containerID="cri-o://34c7f45d2c9cb180d7c08a560ddcfcf95e23f3fa8b56979e4ea9e41b424f888b" gracePeriod=30 Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.809415 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" containerID="cri-o://11222103517d49608c40c5db5a80c4fb0e6936aa3b9146f78928edb3436b5c6e" gracePeriod=30 Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.814389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.846383 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.846888 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngn54\" (UniqueName: \"kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54\") pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.847230 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom\") 
pod \"barbican-worker-577c5877f7-gslrj\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892002 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892353 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892431 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5rmz\" (UniqueName: \"kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892490 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892541 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.892987 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.893071 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.913213 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.918826 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " 
pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.921810 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:00 crc kubenswrapper[5081]: I1003 15:53:00.971132 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.000019 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5rmz\" (UniqueName: \"kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz\") pod \"barbican-keystone-listener-b57b4ccd-848cl\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.101987 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.102077 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data podName:7060c6c8-bbe8-47ae-8ef2-4358291dbb61 nodeName:}" failed. No retries permitted until 2025-10-03 15:53:01.60204016 +0000 UTC m=+1500.567596773 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data") pod "rabbitmq-server-0" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61") : configmap "rabbitmq-config-data" not found Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.141994 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"] Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.143662 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.197623 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.198104 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="openstack-network-exporter" containerID="cri-o://9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1" gracePeriod=300 Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204531 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204646 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98td7\" (UniqueName: \"kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204677 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204716 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204809 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.204851 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.247314 5081 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"] Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.269713 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"] Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.271460 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance0aea-account-delete-zkcjp" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.281977 5081 generic.go:334] "Generic (PLEG): container finished" podID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerID="34c7f45d2c9cb180d7c08a560ddcfcf95e23f3fa8b56979e4ea9e41b424f888b" exitCode=143 Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.282038 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerDied","Data":"34c7f45d2c9cb180d7c08a560ddcfcf95e23f3fa8b56979e4ea9e41b424f888b"} Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.298946 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306731 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306805 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306864 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306881 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306926 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98td7\" (UniqueName: \"kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306950 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:01 crc 
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.306979 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.310008 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.322918 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.326227 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.329395 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="openstack-network-exporter" containerID="cri-o://466a0e5a4bc9f929bdfea1ce7776a7ad5217cf962aae9b86e3401192bd5301ea" gracePeriod=300
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.335849 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.336068 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.336376 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.340482 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.376668 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.396389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98td7\" (UniqueName: \"kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7\") pod \"barbican-api-5df6654c96-f7vp5\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.408391 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.411545 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gqdd\" (UniqueName: \"kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd\") pod \"glance0aea-account-delete-zkcjp\" (UID: \"a650c48d-76ae-45a3-b79c-e6e014009769\") " pod="openstack/glance0aea-account-delete-zkcjp"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.412976 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedee-account-delete-tcdxx"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.456785 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.457064 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd" containerID="cri-o://8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" gracePeriod=30
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.457523 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="openstack-network-exporter" containerID="cri-o://49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71" gracePeriod=30
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.489807 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.516873 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gqdd\" (UniqueName: \"kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd\") pod \"glance0aea-account-delete-zkcjp\" (UID: \"a650c48d-76ae-45a3-b79c-e6e014009769\") " pod="openstack/glance0aea-account-delete-zkcjp"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.516999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcf48\" (UniqueName: \"kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48\") pod \"cinderedee-account-delete-tcdxx\" (UID: \"e86e6033-7336-4deb-bf90-8c4941d56542\") " pod="openstack/cinderedee-account-delete-tcdxx"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.526966 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement9594-account-delete-mxv78"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.538928 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement9594-account-delete-mxv78"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.549629 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.558219 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5df6654c96-f7vp5"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.612733 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement9594-account-delete-mxv78"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.620116 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jcxw\" (UniqueName: \"kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw\") pod \"placement9594-account-delete-mxv78\" (UID: \"4198a604-1b99-4822-9377-afaaef616d15\") " pod="openstack/placement9594-account-delete-mxv78"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.620174 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcf48\" (UniqueName: \"kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48\") pod \"cinderedee-account-delete-tcdxx\" (UID: \"e86e6033-7336-4deb-bf90-8c4941d56542\") " pod="openstack/cinderedee-account-delete-tcdxx"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.621745 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gqdd\" (UniqueName: \"kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd\") pod \"glance0aea-account-delete-zkcjp\" (UID: \"a650c48d-76ae-45a3-b79c-e6e014009769\") " pod="openstack/glance0aea-account-delete-zkcjp"
Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.621867 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.621924 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data podName:7060c6c8-bbe8-47ae-8ef2-4358291dbb61 nodeName:}" failed. No retries permitted until 2025-10-03 15:53:02.621906219 +0000 UTC m=+1501.587462892 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data") pod "rabbitmq-server-0" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61") : configmap "rabbitmq-config-data" not found
Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.622456 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.622493 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data podName:5344c615-93ae-4a4a-95b1-3bbe3327f42e nodeName:}" failed. No retries permitted until 2025-10-03 15:53:02.122484895 +0000 UTC m=+1501.088041508 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data") pod "rabbitmq-cell1-server-0" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e") : configmap "rabbitmq-cell1-config-data" not found
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.640868 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance0aea-account-delete-zkcjp"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.725362 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="ovsdbserver-nb" containerID="cri-o://e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" gracePeriod=300
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.749874 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jcxw\" (UniqueName: \"kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw\") pod \"placement9594-account-delete-mxv78\" (UID: \"4198a604-1b99-4822-9377-afaaef616d15\") " pod="openstack/placement9594-account-delete-mxv78"
Oct 03 15:53:01 crc kubenswrapper[5081]: E1003 15:53:01.795458 5081 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.59:50798->38.102.83.59:44537: write tcp 38.102.83.59:50798->38.102.83.59:44537: write: broken pipe
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.795504 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"]
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.810359 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jcxw\" (UniqueName: \"kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw\") pod \"placement9594-account-delete-mxv78\" (UID: \"4198a604-1b99-4822-9377-afaaef616d15\") " pod="openstack/placement9594-account-delete-mxv78"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.848600 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcf48\" (UniqueName: \"kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48\") pod \"cinderedee-account-delete-tcdxx\" (UID: \"e86e6033-7336-4deb-bf90-8c4941d56542\") " pod="openstack/cinderedee-account-delete-tcdxx"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.903710 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:01 crc kubenswrapper[5081]: I1003 15:53:01.961014 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement9594-account-delete-mxv78"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.036018 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.045985 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-g95hn"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.075140 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzlj2\" (UniqueName: \"kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2\") pod \"neutron4548-account-delete-zmhl4\" (UID: \"83b5887d-7cec-4aef-8d5c-041346dbc89f\") " pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.103962 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedee-account-delete-tcdxx"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.105813 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-g95hn"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.175533 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-4j44h"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.175791 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-4j44h" podUID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" containerName="openstack-network-exporter" containerID="cri-o://e241addceb133533afe94ad230a8d11c2bf695f52e2d891b7ebe29478660b475" gracePeriod=30
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.194692 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzlj2\" (UniqueName: \"kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2\") pod \"neutron4548-account-delete-zmhl4\" (UID: \"83b5887d-7cec-4aef-8d5c-041346dbc89f\") " pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.195959 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.196134 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data podName:5344c615-93ae-4a4a-95b1-3bbe3327f42e nodeName:}" failed. No retries permitted until 2025-10-03 15:53:03.196114299 +0000 UTC m=+1502.161670912 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data") pod "rabbitmq-cell1-server-0" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e") : configmap "rabbitmq-cell1-config-data" not found
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.220812 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-52hh5"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.247897 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kngsq"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.294699 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzlj2\" (UniqueName: \"kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2\") pod \"neutron4548-account-delete-zmhl4\" (UID: \"83b5887d-7cec-4aef-8d5c-041346dbc89f\") " pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.327066 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-pnxvc"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.349710 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-pnxvc"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.372740 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.476659 5081 generic.go:334] "Generic (PLEG): container finished" podID="f3630909-8ada-4296-af7c-8135f2221e39" containerID="9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1" exitCode=2
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.477055 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerDied","Data":"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1"}
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.483524 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-9759p"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.484983 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="ovsdbserver-sb" containerID="cri-o://2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068" gracePeriod=299
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.532646 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-mlcdv"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.587661 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-9759p"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.603359 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_262a88da-d2e8-428f-b38a-1b59714ddfe7/ovsdbserver-nb/0.log"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.603408 5081 generic.go:334] "Generic (PLEG): container finished" podID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerID="466a0e5a4bc9f929bdfea1ce7776a7ad5217cf962aae9b86e3401192bd5301ea" exitCode=2
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.603490 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerDied","Data":"466a0e5a4bc9f929bdfea1ce7776a7ad5217cf962aae9b86e3401192bd5301ea"}
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.610713 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-mlcdv"]
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.633115 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.653538 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data podName:7060c6c8-bbe8-47ae-8ef2-4358291dbb61 nodeName:}" failed. No retries permitted until 2025-10-03 15:53:04.653506643 +0000 UTC m=+1503.619063256 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data") pod "rabbitmq-server-0" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61") : configmap "rabbitmq-config-data" not found
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.689359 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.698134 5081 generic.go:334] "Generic (PLEG): container finished" podID="477e7150-1a22-403b-950e-6d1547d2859c" containerID="49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71" exitCode=2
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.699575 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerDied","Data":"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71"}
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.699673 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic1ef-account-delete-62l5c"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.720617 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.727131 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-765fs"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.744298 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vptqx\" (UniqueName: \"kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx\") pod \"novaapic1ef-account-delete-62l5c\" (UID: \"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5\") " pod="openstack/novaapic1ef-account-delete-62l5c"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.745228 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-765fs"]
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.751971 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec is running failed: container process not found" containerID="e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.754623 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec is running failed: container process not found" containerID="e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.754757 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-pzvf2"]
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.756325 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec is running failed: container process not found" containerID="e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 03 15:53:02 crc kubenswrapper[5081]: E1003 15:53:02.756381 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="ovsdbserver-nb"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.763716 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-9zm9c"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.783827 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-9zm9c"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.801152 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-pzvf2"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.848522 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vptqx\" (UniqueName: \"kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx\") pod \"novaapic1ef-account-delete-62l5c\" (UID: \"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5\") " pod="openstack/novaapic1ef-account-delete-62l5c"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.861234 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.861594 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="dnsmasq-dns" containerID="cri-o://d4a68c5c897302f035bc3b86f9336410e2d31a6118c0346d07b8cde661598f36" gracePeriod=10
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.903067 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-z5l85"]
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.925886 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vptqx\" (UniqueName: \"kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx\") pod \"novaapic1ef-account-delete-62l5c\" (UID: \"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5\") " pod="openstack/novaapic1ef-account-delete-62l5c"
Oct 03 15:53:02 crc kubenswrapper[5081]: I1003 15:53:02.946977 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-z5l85"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.027370 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.056821 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.060404 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-server" containerID="cri-o://2dc48262e0ade0cd5b46c732df33fce4d98185362e199780f79f0145f57aa828" gracePeriod=30
podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="swift-recon-cron" containerID="cri-o://00b89755a18526b56352f35b1330f853e78ba6ee6b50eb49214837e6f9797ab9" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.060983 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="rsync" containerID="cri-o://e80c760b9b37c5a71b037090135c5ba4f32f98d8709aa318d7bf69734c058ea2" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.072909 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-auditor" containerID="cri-o://d0329f49fa7dc846b20dc08a8389809ac26059dd282cb5b7a946f6475f240c48" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073035 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-expirer" containerID="cri-o://a7b2d7ca1d510ef79cf6048ee9579f2ac7ff3e40ff9234a031ecf02dfba25777" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073077 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-updater" containerID="cri-o://0dfae43d5dfa9c09237aa7cb9e6fcba01b60a0f0e13fdd86961e6469f5f09d3c" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073109 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-auditor" containerID="cri-o://fc2a1a4f0df9739d588ce081aaaa43ab9cfe57521cacaf41f5e2e169875cad7b" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073139 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-replicator" containerID="cri-o://962a1e3fd606faa70fe55c161c25398c016ac0969ef92d4e88b58a60f3ef02eb" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073173 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-server" containerID="cri-o://2d265cc1788a8ffd41d868c45e20c3c29c12f51c4e066b3d0b0c81546645bcfa" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073203 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-updater" containerID="cri-o://ae79f147bd8fd93a896550501e3a9434ca704c15d3e99a1d98595472b5b0f638" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073264 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-reaper" containerID="cri-o://9fb9ff61254c258e053e39687cfe871e46e9e37bc3923e11a92f9ba4e6d36e54" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073317 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" 
containerName="container-replicator" containerID="cri-o://45463607f3525f8e75aa01b96f684ea8a4d207f0bdd044315c3bcfc0933d9b65" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073361 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-server" containerID="cri-o://676eedca602ba56315b71044aa07d745f875330d2c6a9a252a84c0c20469a5b3" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073435 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-replicator" containerID="cri-o://21dea586b40c33dcc79f88530d74f5e7fcd590c00c4174c95c9b987e02a408cb" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.073479 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-auditor" containerID="cri-o://6183bf8da2a80f5b9e9698fac4e3f60d821b5c52084e202cff3ec20d564ffd21" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.110978 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-58db4df78-q9st4"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.111245 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-58db4df78-q9st4" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-log" containerID="cri-o://5667e13010e026cffae93f1f48fe2279663fa544702ab46b085a5c829713ef57" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.111383 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-58db4df78-q9st4" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-api" containerID="cri-o://683a9cea3704f28dee554ad7d0a5fd46617595e708f08884aeeafa501eeeb131" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.119167 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapic1ef-account-delete-62l5c" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.157755 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.158311 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-log" containerID="cri-o://20c238aeaa3c71e4c9d06c0d2978a9a624c469b91b5eb72c44d08c1bce7fd2a9" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.158794 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-httpd" containerID="cri-o://71bc762a04da349305dfb7aba1fc1915351db7ac6e3d40db700ab69ee2492adf" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.185360 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.185639 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-log" containerID="cri-o://8d9622a52a15f0f59848d03a12a790b4b65eb968515da8ea26ad4197241e283b" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.186157 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-httpd" containerID="cri-o://7893281dd7f6e155dae82597172d11e93702c9a53318c8564135c0043e49216e" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.199807 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.199885 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data podName:5344c615-93ae-4a4a-95b1-3bbe3327f42e nodeName:}" failed. No retries permitted until 2025-10-03 15:53:05.199868308 +0000 UTC m=+1504.165424921 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data") pod "rabbitmq-cell1-server-0" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e") : configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.202490 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.247677 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-thgzm"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.308030 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" containerID="cri-o://a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" gracePeriod=29 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.315769 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.316032 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7cc85979bf-f5kj9" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-httpd" containerID="cri-o://5f6d5178719cb62c16b047a9e06ac48b70916f8257a72246b0bf985d185298bd" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.319048 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7cc85979bf-f5kj9" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-server" containerID="cri-o://b5c101a43a3295f40820a6e0bc4115d66a17ebbfd93458ef2f3e176a858daf89" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.348711 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" containerID="cri-o://5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e" gracePeriod=604800 Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.348871 5081 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Oct 03 15:53:03 crc kubenswrapper[5081]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 03 15:53:03 crc kubenswrapper[5081]: + source /usr/local/bin/container-scripts/functions Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNBridge=br-int Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNRemote=tcp:localhost:6642 Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNEncapType=geneve Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNAvailabilityZones= Oct 03 15:53:03 crc kubenswrapper[5081]: ++ EnableChassisAsGateway=true Oct 03 15:53:03 crc kubenswrapper[5081]: ++ PhysicalNetworks= Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNHostName= Oct 03 15:53:03 crc kubenswrapper[5081]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 03 15:53:03 crc kubenswrapper[5081]: ++ ovs_dir=/var/lib/openvswitch Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 03 15:53:03 crc kubenswrapper[5081]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 03 15:53:03 crc 
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + sleep 0.5
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + cleanup_ovsdb_server_semaphore
Oct 03 15:53:03 crc kubenswrapper[5081]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 03 15:53:03 crc kubenswrapper[5081]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Oct 03 15:53:03 crc kubenswrapper[5081]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-52hh5" message=<
Oct 03 15:53:03 crc kubenswrapper[5081]: Exiting ovsdb-server (5) [ OK ]
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Oct 03 15:53:03 crc kubenswrapper[5081]: + source /usr/local/bin/container-scripts/functions
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNBridge=br-int
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNRemote=tcp:localhost:6642
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNEncapType=geneve
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNAvailabilityZones=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ EnableChassisAsGateway=true
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ PhysicalNetworks=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNHostName=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ DB_FILE=/etc/openvswitch/conf.db
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ ovs_dir=/var/lib/openvswitch
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + sleep 0.5
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + cleanup_ovsdb_server_semaphore
Oct 03 15:53:03 crc kubenswrapper[5081]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 03 15:53:03 crc kubenswrapper[5081]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Oct 03 15:53:03 crc kubenswrapper[5081]: >
Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.348895 5081 kuberuntime_container.go:691] "PreStop hook failed" err=<
Oct 03 15:53:03 crc kubenswrapper[5081]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Oct 03 15:53:03 crc kubenswrapper[5081]: + source /usr/local/bin/container-scripts/functions
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNBridge=br-int
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNRemote=tcp:localhost:6642
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNEncapType=geneve
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNAvailabilityZones=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ EnableChassisAsGateway=true
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ PhysicalNetworks=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ OVNHostName=
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ DB_FILE=/etc/openvswitch/conf.db
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ ovs_dir=/var/lib/openvswitch
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Oct 03 15:53:03 crc kubenswrapper[5081]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + sleep 0.5
Oct 03 15:53:03 crc kubenswrapper[5081]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 03 15:53:03 crc kubenswrapper[5081]: + cleanup_ovsdb_server_semaphore
Oct 03 15:53:03 crc kubenswrapper[5081]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 03 15:53:03 crc kubenswrapper[5081]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Oct 03 15:53:03 crc kubenswrapper[5081]: > pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" containerID="cri-o://d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.350406 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" containerID="cri-o://d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" gracePeriod=29
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.358985 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-thgzm"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.371171 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.371434 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ff85fbc4f-f9zcx" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-api" containerID="cri-o://c7fd1d71f2d9ba417f029d604edbb305edd80c93b20a0706ffa0b1ed0e2b1efc" gracePeriod=30
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.371784 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ff85fbc4f-f9zcx" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-httpd" containerID="cri-o://c75af55c799a16c6d3fb1fcae9ca1ebc16c7f7ce2f3b26cb7521040390a192ad" gracePeriod=30
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.384238 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.384477 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" containerID="cri-o://acb498f39e09a0e5daf720cd0a2e76f5299382935696927f7ec5b9dc290467a4" gracePeriod=30
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.384623 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" containerID="cri-o://97c93f28e63efe77b049abc94e9494220f89aeb5a45b0d485d1782988daeb773" gracePeriod=30
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.435762 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"]
Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.450643 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
pods=["openstack/glance-0aea-account-create-p852j"] Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.454624 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.470624 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-0aea-account-create-p852j"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.478902 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.478954 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 03 15:53:03 crc kubenswrapper[5081]: E1003 15:53:03.479032 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.507917 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-edee-account-create-6jgz9"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.518636 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-edee-account-create-6jgz9"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.525038 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.568596 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-9288s"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.624877 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-9288s"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.635991 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-s54wv"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.652005 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-s54wv"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.667845 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.668309 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-log" containerID="cri-o://a66b52b10331b1c92cc1455a575bb5bad525d4a39ad38db2e4104fb8919818e2" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.668887 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-api" containerID="cri-o://a11a2c348b41af6175d5a702fc5ef23abb0b4539c6157c033c27932942b63f91" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: 
I1003 15:53:03.681857 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c347-account-create-qnb2t"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.691344 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-c347-account-create-qnb2t"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.703991 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-7nlkd"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.724089 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-9594-account-create-4wdgs"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.738255 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f3630909-8ada-4296-af7c-8135f2221e39/ovsdbserver-sb/0.log" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.738426 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.740771 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-4j44h_930b2d10-e4fc-4e90-902a-2cb2068e2fcf/openstack-network-exporter/0.log" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.741076 5081 generic.go:334] "Generic (PLEG): container finished" podID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" containerID="e241addceb133533afe94ad230a8d11c2bf695f52e2d891b7ebe29478660b475" exitCode=2 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.741145 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-4j44h" event={"ID":"930b2d10-e4fc-4e90-902a-2cb2068e2fcf","Type":"ContainerDied","Data":"e241addceb133533afe94ad230a8d11c2bf695f52e2d891b7ebe29478660b475"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.743414 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.743477 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-7nlkd"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.746420 5081 generic.go:334] "Generic (PLEG): container finished" podID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerID="b6fd92f8baa9b25fd91643a7fa2bd52ae9a8dc5be12a4823bad3dc1941b76522" exitCode=0 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.746474 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerDied","Data":"b6fd92f8baa9b25fd91643a7fa2bd52ae9a8dc5be12a4823bad3dc1941b76522"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.749614 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-4j44h_930b2d10-e4fc-4e90-902a-2cb2068e2fcf/openstack-network-exporter/0.log" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.749669 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.750848 5081 generic.go:334] "Generic (PLEG): container finished" podID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" exitCode=0 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.750912 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerDied","Data":"d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.752469 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerStarted","Data":"ecd9d45527a4d18d2658290373c32d1b031b1ef50cf1527862fd18fe7523928f"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.756634 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_262a88da-d2e8-428f-b38a-1b59714ddfe7/ovsdbserver-nb/0.log" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.756681 5081 generic.go:334] "Generic (PLEG): container finished" podID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerID="e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" exitCode=143 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.756730 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerDied","Data":"e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.763544 5081 generic.go:334] "Generic (PLEG): container finished" podID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerID="d4a68c5c897302f035bc3b86f9336410e2d31a6118c0346d07b8cde661598f36" exitCode=0 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.763636 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" event={"ID":"ca3311fd-ac43-4729-81e1-472da7bf5878","Type":"ContainerDied","Data":"d4a68c5c897302f035bc3b86f9336410e2d31a6118c0346d07b8cde661598f36"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.765101 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-9594-account-create-4wdgs"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.766383 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f3630909-8ada-4296-af7c-8135f2221e39/ovsdbserver-sb/0.log" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.766421 5081 generic.go:334] "Generic (PLEG): container finished" podID="f3630909-8ada-4296-af7c-8135f2221e39" containerID="2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068" exitCode=143 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.766465 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerDied","Data":"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.766487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f3630909-8ada-4296-af7c-8135f2221e39","Type":"ContainerDied","Data":"d58f96855ad64e1201fbbfdc8801cbf93f8573345bac3fac1a0c5b7b45437708"} Oct 03 15:53:03 crc 
kubenswrapper[5081]: I1003 15:53:03.766503 5081 scope.go:117] "RemoveContainer" containerID="9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.766638 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.774723 5081 generic.go:334] "Generic (PLEG): container finished" podID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerID="5667e13010e026cffae93f1f48fe2279663fa544702ab46b085a5c829713ef57" exitCode=143 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.774782 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerDied","Data":"5667e13010e026cffae93f1f48fe2279663fa544702ab46b085a5c829713ef57"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.794000 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement9594-account-delete-mxv78"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.795400 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerName="galera" containerID="cri-o://de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d" gracePeriod=30 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.806313 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.822842 5081 generic.go:334] "Generic (PLEG): container finished" podID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerID="acb498f39e09a0e5daf720cd0a2e76f5299382935696927f7ec5b9dc290467a4" exitCode=143 Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.822946 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerDied","Data":"acb498f39e09a0e5daf720cd0a2e76f5299382935696927f7ec5b9dc290467a4"} Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826606 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826641 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826663 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") " Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826691 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") " 
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826714 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826792 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826813 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret\") pod \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826867 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826905 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle\") pod \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826931 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.826958 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827006 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827038 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-767dh\" (UniqueName: \"kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh\") pod \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827066 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp7k4\" (UniqueName: \"kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827098 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir\") pod \"f3630909-8ada-4296-af7c-8135f2221e39\" (UID: \"f3630909-8ada-4296-af7c-8135f2221e39\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827130 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827157 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config\") pod \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\" (UID: \"79acb98c-21e6-4bad-ad5b-4ebc855e6378\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.827188 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpfgt\" (UniqueName: \"kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt\") pod \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\" (UID: \"930b2d10-e4fc-4e90-902a-2cb2068e2fcf\") "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.831636 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.832627 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts" (OuterVolumeSpecName: "scripts") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.833645 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config" (OuterVolumeSpecName: "config") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.835822 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config" (OuterVolumeSpecName: "config") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.835886 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.842148 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.843233 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jw7x4"]
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.867648 5081 scope.go:117] "RemoveContainer" containerID="2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.869961 5081 generic.go:334] "Generic (PLEG): container finished" podID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" containerID="c817d8193802204a47e564f00d89b7300147b2f6e4a65ec259ecd73cefe10317" exitCode=137
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.870083 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.877121 5081 generic.go:334] "Generic (PLEG): container finished" podID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerID="5f6d5178719cb62c16b047a9e06ac48b70916f8257a72246b0bf985d185298bd" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.902133 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906285 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="a7b2d7ca1d510ef79cf6048ee9579f2ac7ff3e40ff9234a031ecf02dfba25777" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906317 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="0dfae43d5dfa9c09237aa7cb9e6fcba01b60a0f0e13fdd86961e6469f5f09d3c" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906327 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="fc2a1a4f0df9739d588ce081aaaa43ab9cfe57521cacaf41f5e2e169875cad7b" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906334 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="962a1e3fd606faa70fe55c161c25398c016ac0969ef92d4e88b58a60f3ef02eb" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906342 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="ae79f147bd8fd93a896550501e3a9434ca704c15d3e99a1d98595472b5b0f638" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906349 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="d0329f49fa7dc846b20dc08a8389809ac26059dd282cb5b7a946f6475f240c48" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906355 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="45463607f3525f8e75aa01b96f684ea8a4d207f0bdd044315c3bcfc0933d9b65" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906367 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="9fb9ff61254c258e053e39687cfe871e46e9e37bc3923e11a92f9ba4e6d36e54" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906373 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="6183bf8da2a80f5b9e9698fac4e3f60d821b5c52084e202cff3ec20d564ffd21" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.906380 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="21dea586b40c33dcc79f88530d74f5e7fcd590c00c4174c95c9b987e02a408cb" exitCode=0
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.910968 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh" (OuterVolumeSpecName: "kube-api-access-767dh") pod "79acb98c-21e6-4bad-ad5b-4ebc855e6378" (UID: "79acb98c-21e6-4bad-ad5b-4ebc855e6378"). InnerVolumeSpecName "kube-api-access-767dh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.912837 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt" (OuterVolumeSpecName: "kube-api-access-hpfgt") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "kube-api-access-hpfgt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.917639 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerID="8d9622a52a15f0f59848d03a12a790b4b65eb968515da8ea26ad4197241e283b" exitCode=143
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.920900 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4" (OuterVolumeSpecName: "kube-api-access-dp7k4") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "kube-api-access-dp7k4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.921245 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" containerID="cri-o://b01bf452cb29e0baf0468dc3879379123d94b893f16e315da708a69a521d9ace" gracePeriod=604800
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.927947 5081 generic.go:334] "Generic (PLEG): container finished" podID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerID="20c238aeaa3c71e4c9d06c0d2978a9a624c469b91b5eb72c44d08c1bce7fd2a9" exitCode=143
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932225 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-767dh\" (UniqueName: \"kubernetes.io/projected/79acb98c-21e6-4bad-ad5b-4ebc855e6378-kube-api-access-767dh\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932264 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp7k4\" (UniqueName: \"kubernetes.io/projected/f3630909-8ada-4296-af7c-8135f2221e39-kube-api-access-dp7k4\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932279 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f3630909-8ada-4296-af7c-8135f2221e39-ovsdb-rundir\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932295 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpfgt\" (UniqueName: \"kubernetes.io/projected/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-kube-api-access-hpfgt\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932304 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovn-rundir\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932313 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-config\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932321 5081 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-ovs-rundir\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932343 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932355 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3630909-8ada-4296-af7c-8135f2221e39-config\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.932384 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.935649 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ffd821-3040-46b1-b568-35edb1b5dc1d" path="/var/lib/kubelet/pods/36ffd821-3040-46b1-b568-35edb1b5dc1d/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.936293 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab8a197-adc0-43a7-b416-8d4770b1b646" path="/var/lib/kubelet/pods/3ab8a197-adc0-43a7-b416-8d4770b1b646/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.938201 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="556f311d-1398-48a5-bf38-7dc07f3bdfd0" path="/var/lib/kubelet/pods/556f311d-1398-48a5-bf38-7dc07f3bdfd0/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.943108 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="680afad9-2798-42bd-a115-81c10c66662a" path="/var/lib/kubelet/pods/680afad9-2798-42bd-a115-81c10c66662a/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.944251 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="692f094b-09a6-4e73-820f-5b23de67a2de" path="/var/lib/kubelet/pods/692f094b-09a6-4e73-820f-5b23de67a2de/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.944986 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7456396f-1779-43c3-9a7c-888c42e64a52" path="/var/lib/kubelet/pods/7456396f-1779-43c3-9a7c-888c42e64a52/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.945780 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9" path="/var/lib/kubelet/pods/7ffdc8e4-e2d1-4bf8-a658-fa37587d07c9/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.946838 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d22c4d-1624-46de-80d5-6b366cd99bf8" path="/var/lib/kubelet/pods/81d22c4d-1624-46de-80d5-6b366cd99bf8/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.947719 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="828c409e-c464-4024-8da4-3262e96be555" path="/var/lib/kubelet/pods/828c409e-c464-4024-8da4-3262e96be555/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.948428 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83a84d35-e4fa-4026-b4e0-21bf2b354b0f" path="/var/lib/kubelet/pods/83a84d35-e4fa-4026-b4e0-21bf2b354b0f/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.949754 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c36939f-a099-4873-aa0e-f0b2215798e4" path="/var/lib/kubelet/pods/8c36939f-a099-4873-aa0e-f0b2215798e4/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.950521 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9b4deda-b48b-4c00-8a09-fab4b2e7f95c" path="/var/lib/kubelet/pods/b9b4deda-b48b-4c00-8a09-fab4b2e7f95c/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.951278 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdc78530-6030-49c0-8e3e-ae1b26430a90" path="/var/lib/kubelet/pods/cdc78530-6030-49c0-8e3e-ae1b26430a90/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.952252 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea8e8f30-c5b5-4fc5-a692-75b234dfe04d" path="/var/lib/kubelet/pods/ea8e8f30-c5b5-4fc5-a692-75b234dfe04d/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.954069 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3c69102-25cd-43c8-ab93-3c0137f6a666" path="/var/lib/kubelet/pods/f3c69102-25cd-43c8-ab93-3c0137f6a666/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.954751 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa9930e6-e39c-49d6-a13f-05abb8dce794" path="/var/lib/kubelet/pods/fa9930e6-e39c-49d6-a13f-05abb8dce794/volumes"
Oct 03 15:53:03 crc kubenswrapper[5081]: I1003 15:53:03.981177 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.002283 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "79acb98c-21e6-4bad-ad5b-4ebc855e6378" (UID: "79acb98c-21e6-4bad-ad5b-4ebc855e6378"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.016770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79acb98c-21e6-4bad-ad5b-4ebc855e6378" (UID: "79acb98c-21e6-4bad-ad5b-4ebc855e6378"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.034340 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.034386 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.034399 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.047699 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.078280 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "79acb98c-21e6-4bad-ad5b-4ebc855e6378" (UID: "79acb98c-21e6-4bad-ad5b-4ebc855e6378"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.094478 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.095728 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.107786 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f3630909-8ada-4296-af7c-8135f2221e39" (UID: "f3630909-8ada-4296-af7c-8135f2221e39"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.121074 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "930b2d10-e4fc-4e90-902a-2cb2068e2fcf" (UID: "930b2d10-e4fc-4e90-902a-2cb2068e2fcf"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141224 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/79acb98c-21e6-4bad-ad5b-4ebc855e6378-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141299 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141341 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141352 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141361 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/930b2d10-e4fc-4e90-902a-2cb2068e2fcf-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.141370 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3630909-8ada-4296-af7c-8135f2221e39-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195364 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerDied","Data":"5f6d5178719cb62c16b047a9e06ac48b70916f8257a72246b0bf985d185298bd"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195724 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-jw7x4"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195746 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-49e1-account-create-9cvsx"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195758 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"a7b2d7ca1d510ef79cf6048ee9579f2ac7ff3e40ff9234a031ecf02dfba25777"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195773 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-49e1-account-create-9cvsx"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195785 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"0dfae43d5dfa9c09237aa7cb9e6fcba01b60a0f0e13fdd86961e6469f5f09d3c"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195795 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-x8jsz"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195805 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"fc2a1a4f0df9739d588ce081aaaa43ab9cfe57521cacaf41f5e2e169875cad7b"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195819 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-x8jsz"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195830 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"962a1e3fd606faa70fe55c161c25398c016ac0969ef92d4e88b58a60f3ef02eb"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195840 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"ae79f147bd8fd93a896550501e3a9434ca704c15d3e99a1d98595472b5b0f638"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195852 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"d0329f49fa7dc846b20dc08a8389809ac26059dd282cb5b7a946f6475f240c48"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195863 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195879 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"45463607f3525f8e75aa01b96f684ea8a4d207f0bdd044315c3bcfc0933d9b65"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195892 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mll7n"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195902 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-4548-account-create-pphwt"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195938 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mll7n"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195950 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"9fb9ff61254c258e053e39687cfe871e46e9e37bc3923e11a92f9ba4e6d36e54"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195966 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-4548-account-create-pphwt"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195977 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6ede-account-create-fxvgw"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195987 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"6183bf8da2a80f5b9e9698fac4e3f60d821b5c52084e202cff3ec20d564ffd21"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.195997 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196009 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"21dea586b40c33dcc79f88530d74f5e7fcd590c00c4174c95c9b987e02a408cb"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196017 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerDied","Data":"8d9622a52a15f0f59848d03a12a790b4b65eb968515da8ea26ad4197241e283b"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196030 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerDied","Data":"20c238aeaa3c71e4c9d06c0d2978a9a624c469b91b5eb72c44d08c1bce7fd2a9"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196042 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196053 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196066 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6ede-account-create-fxvgw"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196077 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196089 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196101 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196112 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c1ef-account-create-k8xb5"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196120 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196132 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196142 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-z6xfs"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196152 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c1ef-account-create-k8xb5"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196165 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-z6xfs"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196175 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196186 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fmhsw"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196194 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196207 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fmhsw"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196219 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jcbmh"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196229 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jcbmh"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196239 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.196473 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerName="nova-cell1-conductor-conductor" containerID="cri-o://8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.197526 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-57cd5fc748-p2rdp" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api-log" containerID="cri-o://8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.198617 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.198897 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker-log" containerID="cri-o://424ab139179e1f0e89a94103edf7bef82522d7b08ab2d180de8ddb2ba8927c6e" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199081 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="155bac7b-055b-4bca-a155-f5ab13dacf80" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f24a2e28a7c0ad1349c328c219041c0c090d938313a1795249776310239401b8" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199214 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener-log" containerID="cri-o://5edccb45492f37a61a761bda119802fecb308fa81cec1b916185a47045d64830" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199252 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker" containerID="cri-o://9764cd3101ebf1f837d65e3b2926078df427b405bf605c5170fe2cd8a77e323b" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199308 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener" containerID="cri-o://9791a255062a856210f6f6d869def3d35e782763a70a0b45636ea36481f99787" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199820 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7abf152b-a4ec-4114-bb59-491582952b05" containerName="nova-scheduler-scheduler" containerID="cri-o://d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.199973 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerName="nova-cell0-conductor-conductor" containerID="cri-o://bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.200049 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-57cd5fc748-p2rdp" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api" containerID="cri-o://150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3" gracePeriod=30
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.236252 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_262a88da-d2e8-428f-b38a-1b59714ddfe7/ovsdbserver-nb/0.log"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.236459 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.243900 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344716 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344781 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344891 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344920 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344939 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.344965 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345002 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345044 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345079 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b96p4\" (UniqueName: \"kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345171 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345197 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345237 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5ztd\" (UniqueName: \"kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345306 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb\") pod \"ca3311fd-ac43-4729-81e1-472da7bf5878\" (UID: \"ca3311fd-ac43-4729-81e1-472da7bf5878\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.345385 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs\") pod \"262a88da-d2e8-428f-b38a-1b59714ddfe7\" (UID: \"262a88da-d2e8-428f-b38a-1b59714ddfe7\") "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.347112 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.348936 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config" (OuterVolumeSpecName: "config") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.349654 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts" (OuterVolumeSpecName: "scripts") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.355427 5081 scope.go:117] "RemoveContainer" containerID="9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1"
Oct 03 15:53:04 crc kubenswrapper[5081]: E1003 15:53:04.356215 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1\": container with ID starting with 9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1 not found: ID does not exist" containerID="9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.356273 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1"} err="failed to get container status \"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1\": rpc error: code = NotFound desc = could not find container \"9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1\": container with ID starting with 9a7693e87be3d3c0da95d263c251612c7533402a927ea3da985df079acaee0d1 not found: ID does not exist"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.356295 5081 scope.go:117] "RemoveContainer" containerID="2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"
Oct 03 15:53:04 crc kubenswrapper[5081]: E1003 15:53:04.357857 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068\": container with ID starting with 2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068 not found: ID does not exist" containerID="2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.357885 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068"} err="failed to get container status \"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068\": rpc error: code = NotFound desc = could not find container \"2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068\": container with ID starting with 2a85cda1b3dd609058bad5b014e89fdd4f56aa6a17ad2a19b4df4eb6a848f068 not found: ID does not exist"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.357899 5081 scope.go:117] "RemoveContainer" containerID="c817d8193802204a47e564f00d89b7300147b2f6e4a65ec259ecd73cefe10317"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.361117 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.363028 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4" (OuterVolumeSpecName: "kube-api-access-b96p4") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "kube-api-access-b96p4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.366902 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd" (OuterVolumeSpecName: "kube-api-access-l5ztd") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "kube-api-access-l5ztd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.430368 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.440364 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459511 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdb-rundir\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459550 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459578 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b96p4\" (UniqueName: \"kubernetes.io/projected/262a88da-d2e8-428f-b38a-1b59714ddfe7-kube-api-access-b96p4\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459590 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/262a88da-d2e8-428f-b38a-1b59714ddfe7-config\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459626 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.459639 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5ztd\" (UniqueName: \"kubernetes.io/projected/ca3311fd-ac43-4729-81e1-472da7bf5878-kube-api-access-l5ztd\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.509693 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.526622 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.540594 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement9594-account-delete-mxv78"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.546451 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.590185 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"]
Oct 03 15:53:04 crc kubenswrapper[5081]: W1003 15:53:04.622213 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd5af114_d170_46ce_8a46_ec0b65ddb545.slice/crio-44844f51fb0baf1185faf64810b79e0c150c92233ef89a368af98242a0874ae3 WatchSource:0}: Error finding container 44844f51fb0baf1185faf64810b79e0c150c92233ef89a368af98242a0874ae3: Status 404 returned error can't find the container with id 44844f51fb0baf1185faf64810b79e0c150c92233ef89a368af98242a0874ae3
Oct 03 15:53:04 crc kubenswrapper[5081]: E1003 15:53:04.679275 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 03 15:53:04 crc kubenswrapper[5081]: E1003 15:53:04.679407 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data podName:7060c6c8-bbe8-47ae-8ef2-4358291dbb61 nodeName:}" failed. No retries permitted until 2025-10-03 15:53:08.679384095 +0000 UTC m=+1507.644940708 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data") pod "rabbitmq-server-0" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61") : configmap "rabbitmq-config-data" not found
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.742199 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"]
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.855401 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-7cc85979bf-f5kj9" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": dial tcp 10.217.0.165:8080: connect: connection refused"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.855905 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-7cc85979bf-f5kj9" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": dial tcp 10.217.0.165:8080: connect: connection refused"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.866167 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.884497 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.899075 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.942462 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic1ef-account-delete-62l5c" event={"ID":"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5","Type":"ContainerStarted","Data":"64462caa445d3495647047cefb839dd94ebf0cd1aeae1ec5260082beed69fb7c"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.944196 5081 generic.go:334] "Generic (PLEG): container finished" podID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerID="424ab139179e1f0e89a94103edf7bef82522d7b08ab2d180de8ddb2ba8927c6e" exitCode=143
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.944236 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerDied","Data":"424ab139179e1f0e89a94103edf7bef82522d7b08ab2d180de8ddb2ba8927c6e"}
Oct 03 15:53:04 crc kubenswrapper[5081]: I1003 15:53:04.987666 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007354 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="e80c760b9b37c5a71b037090135c5ba4f32f98d8709aa318d7bf69734c058ea2" exitCode=0
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007613 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="2d265cc1788a8ffd41d868c45e20c3c29c12f51c4e066b3d0b0c81546645bcfa" exitCode=0
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007624 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="676eedca602ba56315b71044aa07d745f875330d2c6a9a252a84c0c20469a5b3" exitCode=0
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007632 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="2dc48262e0ade0cd5b46c732df33fce4d98185362e199780f79f0145f57aa828" exitCode=0
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007708 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"e80c760b9b37c5a71b037090135c5ba4f32f98d8709aa318d7bf69734c058ea2"}
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007740 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"2d265cc1788a8ffd41d868c45e20c3c29c12f51c4e066b3d0b0c81546645bcfa"}
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"676eedca602ba56315b71044aa07d745f875330d2c6a9a252a84c0c20469a5b3"}
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.007764 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"2dc48262e0ade0cd5b46c732df33fce4d98185362e199780f79f0145f57aa828"}
Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.009017 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openstack/cinderedee-account-delete-tcdxx" event={"ID":"e86e6033-7336-4deb-bf90-8c4941d56542","Type":"ContainerStarted","Data":"0b5d509d21e76fb092e8a9f720e51c9b9f3b048c51f5b30bd82dc38072d9922a"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.012446 5081 generic.go:334] "Generic (PLEG): container finished" podID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerID="c75af55c799a16c6d3fb1fcae9ca1ebc16c7f7ce2f3b26cb7521040390a192ad" exitCode=0 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.012496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerDied","Data":"c75af55c799a16c6d3fb1fcae9ca1ebc16c7f7ce2f3b26cb7521040390a192ad"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.014908 5081 generic.go:334] "Generic (PLEG): container finished" podID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerID="7ad6062447176d359892df549a0f432d03509731f48fa17e1a53bd11f1b7042c" exitCode=0 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.014953 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerDied","Data":"7ad6062447176d359892df549a0f432d03509731f48fa17e1a53bd11f1b7042c"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.022383 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" event={"ID":"ca3311fd-ac43-4729-81e1-472da7bf5878","Type":"ContainerDied","Data":"b3e358210a5641428baf77412ce5fd9844259ac33fc39dd3fcecd4549f57dcf0"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.022429 5081 scope.go:117] "RemoveContainer" containerID="d4a68c5c897302f035bc3b86f9336410e2d31a6118c0346d07b8cde661598f36" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.022587 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84cbdd78c7-br9mh" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.054221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron4548-account-delete-zmhl4" event={"ID":"83b5887d-7cec-4aef-8d5c-041346dbc89f","Type":"ContainerStarted","Data":"b8d4462cf3e0bee8ace9b8c43fdd447665858e5c3fba54e411218dbfb309ff21"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.056469 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerStarted","Data":"65b717a24fa17c44796587db00e37ce3290f0faba7d3338dd5922b855e7ae029"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.060860 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_262a88da-d2e8-428f-b38a-1b59714ddfe7/ovsdbserver-nb/0.log" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.061090 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"262a88da-d2e8-428f-b38a-1b59714ddfe7","Type":"ContainerDied","Data":"fa3d3d2bcd76a6f259158815318000e06e1f52e78c04db4591f1e1a3e78f6c69"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.061207 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.065581 5081 generic.go:334] "Generic (PLEG): container finished" podID="074c619e-3cb7-417e-8192-9d13725cdde5" containerID="a66b52b10331b1c92cc1455a575bb5bad525d4a39ad38db2e4104fb8919818e2" exitCode=143 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.065649 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerDied","Data":"a66b52b10331b1c92cc1455a575bb5bad525d4a39ad38db2e4104fb8919818e2"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.075895 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.077999 5081 generic.go:334] "Generic (PLEG): container finished" podID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerID="5edccb45492f37a61a761bda119802fecb308fa81cec1b916185a47045d64830" exitCode=143 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.078112 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerDied","Data":"5edccb45492f37a61a761bda119802fecb308fa81cec1b916185a47045d64830"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.088852 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance0aea-account-delete-zkcjp" event={"ID":"a650c48d-76ae-45a3-b79c-e6e014009769","Type":"ContainerStarted","Data":"dd9fe35559aea93247f10beae16eb3e08363367015c9318222e2de501012dd9e"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.089230 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.097228 5081 generic.go:334] "Generic (PLEG): container finished" podID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerID="b5c101a43a3295f40820a6e0bc4115d66a17ebbfd93458ef2f3e176a858daf89" exitCode=0 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.097319 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerDied","Data":"b5c101a43a3295f40820a6e0bc4115d66a17ebbfd93458ef2f3e176a858daf89"} Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.097096 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db is running failed: container process not found" containerID="bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.104848 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db is 
running failed: container process not found" containerID="bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.106098 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db is running failed: container process not found" containerID="bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.106129 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerName="nova-cell0-conductor-conductor" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.131962 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.138098 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.149978 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerStarted","Data":"44844f51fb0baf1185faf64810b79e0c150c92233ef89a368af98242a0874ae3"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.157337 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config" (OuterVolumeSpecName: "config") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.165340 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerStarted","Data":"304034c7570a894a485d85232f959ab51f7a6cbbd0090b93e1d195aa002ec3c7"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.174788 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-4j44h_930b2d10-e4fc-4e90-902a-2cb2068e2fcf/openstack-network-exporter/0.log" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.174872 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-4j44h" event={"ID":"930b2d10-e4fc-4e90-902a-2cb2068e2fcf","Type":"ContainerDied","Data":"adf9592aeef114010d56f13b557ba9cd2b077b43920be74586bfafde6fe44d40"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.174977 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-4j44h" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.176203 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.187394 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerDied","Data":"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f"} Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.187469 5081 generic.go:334] "Generic (PLEG): container finished" podID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerID="8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f" exitCode=143 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.191141 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.191168 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.191180 5081 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.191194 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.193659 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9594-account-delete-mxv78" event={"ID":"4198a604-1b99-4822-9377-afaaef616d15","Type":"ContainerStarted","Data":"5a142b679a6b1ba417969d37e853604bbe3e8e9150c8a0ee39fa3a5f71c57e71"} Oct 
03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.210989 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ca3311fd-ac43-4729-81e1-472da7bf5878" (UID: "ca3311fd-ac43-4729-81e1-472da7bf5878"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.258799 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "262a88da-d2e8-428f-b38a-1b59714ddfe7" (UID: "262a88da-d2e8-428f-b38a-1b59714ddfe7"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.294265 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ca3311fd-ac43-4729-81e1-472da7bf5878-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.294300 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/262a88da-d2e8-428f-b38a-1b59714ddfe7-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.294334 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.294530 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data podName:5344c615-93ae-4a4a-95b1-3bbe3327f42e nodeName:}" failed. No retries permitted until 2025-10-03 15:53:09.294511535 +0000 UTC m=+1508.260068148 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data") pod "rabbitmq-cell1-server-0" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e") : configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.443457 5081 scope.go:117] "RemoveContainer" containerID="31ffa9f16a4b4b7ea9f2a616d344cb10ab401b43531108fcd040dc93ba817552" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.551180 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7cc85979bf-f5kj9" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.610657 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.622258 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84cbdd78c7-br9mh"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.638542 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.651593 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.664319 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-4j44h"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.669527 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-4j44h"] Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.680859 5081 scope.go:117] "RemoveContainer" containerID="466a0e5a4bc9f929bdfea1ce7776a7ad5217cf962aae9b86e3401192bd5301ea" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.712977 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5x4z\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713532 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713583 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713606 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713629 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713782 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713868 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.713900 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd\") pod \"262952b6-434e-4da7-8cdb-b7073a71f13c\" (UID: \"262952b6-434e-4da7-8cdb-b7073a71f13c\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.714265 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.714945 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.715155 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.715171 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/262952b6-434e-4da7-8cdb-b7073a71f13c-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.727652 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": read tcp 10.217.0.2:39264->10.217.0.164:8776: read: connection reset by peer" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.746198 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.752332 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z" (OuterVolumeSpecName: "kube-api-access-p5x4z") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "kube-api-access-p5x4z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.756009 5081 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Oct 03 15:53:05 crc kubenswrapper[5081]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-10-03T15:53:03Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Oct 03 15:53:05 crc kubenswrapper[5081]: /etc/init.d/functions: line 589: 432 Alarm clock "$@" Oct 03 15:53:05 crc kubenswrapper[5081]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-kngsq" message=< Oct 03 15:53:05 crc kubenswrapper[5081]: Exiting ovn-controller (1) [FAILED] Oct 03 15:53:05 crc kubenswrapper[5081]: Killing ovn-controller (1) [ OK ] Oct 03 15:53:05 crc kubenswrapper[5081]: 2025-10-03T15:53:03Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Oct 03 15:53:05 crc kubenswrapper[5081]: /etc/init.d/functions: line 589: 432 Alarm clock "$@" Oct 03 15:53:05 crc kubenswrapper[5081]: > Oct 03 15:53:05 crc kubenswrapper[5081]: E1003 15:53:05.756043 5081 kuberuntime_container.go:691] "PreStop hook failed" err=< Oct 03 15:53:05 crc kubenswrapper[5081]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-10-03T15:53:03Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Oct 03 15:53:05 crc kubenswrapper[5081]: /etc/init.d/functions: line 589: 432 Alarm clock "$@" Oct 03 15:53:05 crc kubenswrapper[5081]: > pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" containerID="cri-o://973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.756079 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" containerID="cri-o://973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" gracePeriod=27 Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.756272 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" probeResult="failure" output="" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.802806 5081 scope.go:117] "RemoveContainer" containerID="e4fc38b21f34ff39a2dd889839a97cf840963709780ef017e55d3f37bdaa21ec" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.816880 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5x4z\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-kube-api-access-p5x4z\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.816908 5081 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/262952b6-434e-4da7-8cdb-b7073a71f13c-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.818303 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.864242 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0487cd14-638b-44f4-9154-d5d57307f0bd" path="/var/lib/kubelet/pods/0487cd14-638b-44f4-9154-d5d57307f0bd/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.865675 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="090ece03-2db4-4ab8-9ae0-a18b750bdc8b" path="/var/lib/kubelet/pods/090ece03-2db4-4ab8-9ae0-a18b750bdc8b/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.866663 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1498b040-f5c3-404a-aced-26e9b82387c9" path="/var/lib/kubelet/pods/1498b040-f5c3-404a-aced-26e9b82387c9/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.868479 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" path="/var/lib/kubelet/pods/262a88da-d2e8-428f-b38a-1b59714ddfe7/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.869763 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="322d9ed5-f62f-4984-b44c-013901c00c9a" path="/var/lib/kubelet/pods/322d9ed5-f62f-4984-b44c-013901c00c9a/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.870368 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aeecca0-1628-4103-ba8b-5dcf3bb564e3" path="/var/lib/kubelet/pods/5aeecca0-1628-4103-ba8b-5dcf3bb564e3/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.871102 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d4651e0-3f61-4f14-8239-326b5203caf5" path="/var/lib/kubelet/pods/5d4651e0-3f61-4f14-8239-326b5203caf5/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.877172 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e1d7200-8b2d-4cf8-be4a-7924fc195005" path="/var/lib/kubelet/pods/6e1d7200-8b2d-4cf8-be4a-7924fc195005/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.877841 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79acb98c-21e6-4bad-ad5b-4ebc855e6378" path="/var/lib/kubelet/pods/79acb98c-21e6-4bad-ad5b-4ebc855e6378/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.880657 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b0f63e5-fce3-4604-9212-2d951a310c81" path="/var/lib/kubelet/pods/8b0f63e5-fce3-4604-9212-2d951a310c81/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.881390 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" path="/var/lib/kubelet/pods/930b2d10-e4fc-4e90-902a-2cb2068e2fcf/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.882062 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b82c10f0-5e99-44c9-807d-e34c63b187e9" path="/var/lib/kubelet/pods/b82c10f0-5e99-44c9-807d-e34c63b187e9/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.882552 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" path="/var/lib/kubelet/pods/ca3311fd-ac43-4729-81e1-472da7bf5878/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.883771 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d75d32-d4c9-4f1c-b62d-f44a65f973a1" 
path="/var/lib/kubelet/pods/e5d75d32-d4c9-4f1c-b62d-f44a65f973a1/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.884478 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3630909-8ada-4296-af7c-8135f2221e39" path="/var/lib/kubelet/pods/f3630909-8ada-4296-af7c-8135f2221e39/volumes" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.917734 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.917870 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.918011 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.918034 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.918065 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.918089 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fvj8\" (UniqueName: \"kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8\") pod \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\" (UID: \"cbc5b56e-83ed-460e-a57d-2c51df24c5dc\") " Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.922915 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.988902 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.989606 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts" (OuterVolumeSpecName: "scripts") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:05 crc kubenswrapper[5081]: I1003 15:53:05.990739 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8" (OuterVolumeSpecName: "kube-api-access-4fvj8") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "kube-api-access-4fvj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.020233 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.020272 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.020283 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fvj8\" (UniqueName: \"kubernetes.io/projected/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-kube-api-access-4fvj8\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.020293 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.068784 5081 scope.go:117] "RemoveContainer" containerID="e241addceb133533afe94ad230a8d11c2bf695f52e2d891b7ebe29478660b475" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.252840 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kngsq_686d3fe6-8cc9-4013-a5f3-55fe41ac840e/ovn-controller/0.log" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.252882 5081 generic.go:334] "Generic (PLEG): container finished" podID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerID="973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" exitCode=143 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.261418 5081 generic.go:334] "Generic (PLEG): container finished" podID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerID="bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" exitCode=0 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.289858 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener-log" containerID="cri-o://702ba6b98e592df611cab939fccc402c5b2b70dd82e3db9c234d04ce1db93139" gracePeriod=30 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.290430 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" podUID="684a3452-107a-4e1f-93a5-c063711e6377" 
containerName="barbican-keystone-listener" containerID="cri-o://05bb924771a100f2ff1e235a945a86029a7579a8826ee5c4ed8c670df36b3446" gracePeriod=30 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.309786 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.356399 5081 generic.go:334] "Generic (PLEG): container finished" podID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerID="9791a255062a856210f6f6d869def3d35e782763a70a0b45636ea36481f99787" exitCode=0 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.370203 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7cc85979bf-f5kj9" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.404047 5081 generic.go:334] "Generic (PLEG): container finished" podID="155bac7b-055b-4bca-a155-f5ab13dacf80" containerID="f24a2e28a7c0ad1349c328c219041c0c090d938313a1795249776310239401b8" exitCode=0 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.411223 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" podStartSLOduration=6.411180279 podStartE2EDuration="6.411180279s" podCreationTimestamp="2025-10-03 15:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:53:06.336807665 +0000 UTC m=+1505.302364278" watchObservedRunningTime="2025-10-03 15:53:06.411180279 +0000 UTC m=+1505.376736892" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.417827 5081 generic.go:334] "Generic (PLEG): container finished" podID="a650c48d-76ae-45a3-b79c-e6e014009769" containerID="1e5cd96417e6671023463d9a740c00e60c021e2e1be7b522e4ffbdd84d119b70" exitCode=0 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.444340 5081 generic.go:334] "Generic (PLEG): container finished" podID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerID="11222103517d49608c40c5db5a80c4fb0e6936aa3b9146f78928edb3436b5c6e" exitCode=0 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.455714 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapic1ef-account-delete-62l5c" podUID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" containerName="mariadb-account-delete" containerID="cri-o://d584375736a0b9b1bb5ab0e43ef15ea58c0ab9847c889a51251a6379833bea5f" gracePeriod=30 Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.525651 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-58db4df78-q9st4" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.149:8778/\": read tcp 10.217.0.2:58370->10.217.0.149:8778: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.528940 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-58db4df78-q9st4" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.149:8778/\": read tcp 10.217.0.2:58356->10.217.0.149:8778: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.692110 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapic1ef-account-delete-62l5c" podStartSLOduration=5.692086232 podStartE2EDuration="5.692086232s" podCreationTimestamp="2025-10-03 15:53:01 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:53:06.473397266 +0000 UTC m=+1505.438953899" watchObservedRunningTime="2025-10-03 15:53:06.692086232 +0000 UTC m=+1505.657642865" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.693883 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.168:9292/healthcheck\": read tcp 10.217.0.2:36040->10.217.0.168:9292: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.694008 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.168:9292/healthcheck\": read tcp 10.217.0.2:36044->10.217.0.168:9292: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.759817 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.767223 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.767355 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.780188 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.780235 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.780252 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.792257 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.801730 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data" (OuterVolumeSpecName: "config-data") pod "262952b6-434e-4da7-8cdb-b7073a71f13c" (UID: "262952b6-434e-4da7-8cdb-b7073a71f13c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.801930 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.169:9292/healthcheck\": read tcp 10.217.0.2:37598->10.217.0.169:9292: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.801930 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.169:9292/healthcheck\": read tcp 10.217.0.2:37610->10.217.0.169:9292: read: connection reset by peer" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.884972 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.885029 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/262952b6-434e-4da7-8cdb-b7073a71f13c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.969651 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data" (OuterVolumeSpecName: "config-data") pod "cbc5b56e-83ed-460e-a57d-2c51df24c5dc" (UID: "cbc5b56e-83ed-460e-a57d-2c51df24c5dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:06 crc kubenswrapper[5081]: I1003 15:53:06.992434 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbc5b56e-83ed-460e-a57d-2c51df24c5dc-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.110465 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.112922 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.132913 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.132984 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7abf152b-a4ec-4114-bb59-491582952b05" containerName="nova-scheduler-scheduler" Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.214243 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f is running failed: container process not found" containerID="973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.217912 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f is running failed: container process not found" containerID="973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.220536 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f is running failed: container process not found" containerID="973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.220594 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-kngsq" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.234382 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.234808 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.235103 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.235129 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.235686 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.239207 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.256451 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.256514 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.531914 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kngsq_686d3fe6-8cc9-4013-a5f3-55fe41ac840e/ovn-controller/0.log"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.536024 5081 generic.go:334] "Generic (PLEG): container finished" podID="4198a604-1b99-4822-9377-afaaef616d15" containerID="469eeeba4c4c6773e03905fab4594c164e4caa8017190470fb7f3ea24e807fc1" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.561971 5081 generic.go:334] "Generic (PLEG): container finished" podID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" containerID="d584375736a0b9b1bb5ab0e43ef15ea58c0ab9847c889a51251a6379833bea5f" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.565308 5081 generic.go:334] "Generic (PLEG): container finished" podID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerID="71bc762a04da349305dfb7aba1fc1915351db7ac6e3d40db700ab69ee2492adf" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.599476 5081 generic.go:334] "Generic (PLEG): container finished" podID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerID="683a9cea3704f28dee554ad7d0a5fd46617595e708f08884aeeafa501eeeb131" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.601345 5081 generic.go:334] "Generic (PLEG): container finished" podID="074c619e-3cb7-417e-8192-9d13725cdde5" containerID="a11a2c348b41af6175d5a702fc5ef23abb0b4539c6157c033c27932942b63f91" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.605273 5081 generic.go:334] "Generic (PLEG): container finished" podID="684a3452-107a-4e1f-93a5-c063711e6377" containerID="702ba6b98e592df611cab939fccc402c5b2b70dd82e3db9c234d04ce1db93139" exitCode=143
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.626132 5081 generic.go:334] "Generic (PLEG): container finished" podID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerID="97c93f28e63efe77b049abc94e9494220f89aeb5a45b0d485d1782988daeb773" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.632658 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerID="7893281dd7f6e155dae82597172d11e93702c9a53318c8564135c0043e49216e" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.635046 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinderedee-account-delete-tcdxx" podUID="e86e6033-7336-4deb-bf90-8c4941d56542" containerName="mariadb-account-delete" containerID="cri-o://bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.636744 5081 generic.go:334] "Generic (PLEG): container finished" podID="83b5887d-7cec-4aef-8d5c-041346dbc89f" containerID="644e9ebb368a3aa467dad7915d9d3f3bbe1d1f79dd2a2f5ca01718aad4eb4385" exitCode=0
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.640209 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-577c5877f7-gslrj" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker-log" containerID="cri-o://65b717a24fa17c44796587db00e37ce3290f0faba7d3338dd5922b855e7ae029" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.640214 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-577c5877f7-gslrj" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker" containerID="cri-o://9d8741c1eed1c55db0932c406767602466524f85522227eda0fdd1d95c35f2e1" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.660307 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinderedee-account-delete-tcdxx" podStartSLOduration=6.6602814949999996 podStartE2EDuration="6.660281495s" podCreationTimestamp="2025-10-03 15:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:53:07.6531071 +0000 UTC m=+1506.618663713" watchObservedRunningTime="2025-10-03 15:53:07.660281495 +0000 UTC m=+1506.625838108"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.686828 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-577c5877f7-gslrj" podStartSLOduration=7.686810833 podStartE2EDuration="7.686810833s" podCreationTimestamp="2025-10-03 15:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:53:07.683149188 +0000 UTC m=+1506.648705811" watchObservedRunningTime="2025-10-03 15:53:07.686810833 +0000 UTC m=+1506.652367446"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.805419 5081 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.971s"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805522 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq" event={"ID":"686d3fe6-8cc9-4013-a5f3-55fe41ac840e","Type":"ContainerDied","Data":"973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805546 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805582 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805595 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dfe2dfd3-8b6d-466a-92f5-68e649d31298","Type":"ContainerDied","Data":"bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805608 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerStarted","Data":"05bb924771a100f2ff1e235a945a86029a7579a8826ee5c4ed8c670df36b3446"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805619 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805636 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-rd97m"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805644 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dfcmf"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805654 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerStarted","Data":"702ba6b98e592df611cab939fccc402c5b2b70dd82e3db9c234d04ce1db93139"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805668 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-rd97m"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805681 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dfcmf"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.805693 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone106f-account-delete-kh474"]
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806065 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="ovsdbserver-sb"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806079 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="ovsdbserver-sb"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806094 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-server"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806099 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-server"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806112 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806118 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806129 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="dnsmasq-dns"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806135 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="dnsmasq-dns"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806159 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="init"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806166 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="init"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806177 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-httpd"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806183 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-httpd"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806198 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="ovsdbserver-nb"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806205 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="ovsdbserver-nb"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806222 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="cinder-scheduler"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806229 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="cinder-scheduler"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806243 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806250 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806260 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806269 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: E1003 15:53:07.806288 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="probe"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806295 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="probe"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806508 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-server"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806525 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="ovsdbserver-nb"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806537 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="262a88da-d2e8-428f-b38a-1b59714ddfe7" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.806547 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" containerName="proxy-httpd"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.808079 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-central-agent" containerID="cri-o://e89a72236f59fd3fd90bec6d4e3cb013c31ae0d3b3cd93b35b04de7bbc1b544a" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.808294 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="proxy-httpd" containerID="cri-o://c0cc61bb578e6b82b21ec38b2933e461e6db44834efbf165d4d23566ac8055d0" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.808410 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-notification-agent" containerID="cri-o://d458d29d42b270ecda96b4132690bafef24693a1bcafe119e2fb5b9bab9353b5" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.808444 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="sg-core" containerID="cri-o://15cacaf50eb36833c76259d52ef81d26141a2d02ee727e2efd6a2a4ccbfc0c93" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809052 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerName="kube-state-metrics" containerID="cri-o://375aa21783d8942e6463ead5ec2f108cd31f251a593b6147df3ae5e0bcca62a7" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809078 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-57cd5fc748-p2rdp" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.159:9311/healthcheck\": dial tcp 10.217.0.159:9311: connect: connection refused"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809141 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="cinder-scheduler"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809158 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809173 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca3311fd-ac43-4729-81e1-472da7bf5878" containerName="dnsmasq-dns"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809196 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" containerName="probe"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809218 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3630909-8ada-4296-af7c-8135f2221e39" containerName="ovsdbserver-sb"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809232 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="930b2d10-e4fc-4e90-902a-2cb2068e2fcf" containerName="openstack-network-exporter"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809485 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="ca0c9b41-c081-4a81-90f2-730e16c7d347" containerName="memcached" containerID="cri-o://e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810043 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cbc5b56e-83ed-460e-a57d-2c51df24c5dc","Type":"ContainerDied","Data":"7c68f4c51d68116b886b83a476595c798152e15a298c17e3ecc5a524978ed239"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810074 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810100 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810117 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerDied","Data":"9791a255062a856210f6f6d869def3d35e782763a70a0b45636ea36481f99787"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810134 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7cc85979bf-f5kj9" event={"ID":"262952b6-434e-4da7-8cdb-b7073a71f13c","Type":"ContainerDied","Data":"89e7805c88e4fa8234af0fc9c412e45f150caef771e07e4862918d5fc3eff90b"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810152 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone106f-account-delete-kh474"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"155bac7b-055b-4bca-a155-f5ab13dacf80","Type":"ContainerDied","Data":"f24a2e28a7c0ad1349c328c219041c0c090d938313a1795249776310239401b8"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810181 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance0aea-account-delete-zkcjp" event={"ID":"a650c48d-76ae-45a3-b79c-e6e014009769","Type":"ContainerDied","Data":"1e5cd96417e6671023463d9a740c00e60c021e2e1be7b522e4ffbdd84d119b70"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerDied","Data":"11222103517d49608c40c5db5a80c4fb0e6936aa3b9146f78928edb3436b5c6e"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810209 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nwrww"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810223 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic1ef-account-delete-62l5c" event={"ID":"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5","Type":"ContainerStarted","Data":"d584375736a0b9b1bb5ab0e43ef15ea58c0ab9847c889a51251a6379833bea5f"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810238 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron4548-account-delete-zmhl4" event={"ID":"83b5887d-7cec-4aef-8d5c-041346dbc89f","Type":"ContainerStarted","Data":"644e9ebb368a3aa467dad7915d9d3f3bbe1d1f79dd2a2f5ca01718aad4eb4385"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810252 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dfe2dfd3-8b6d-466a-92f5-68e649d31298","Type":"ContainerDied","Data":"1a13d75e73c7a71c393b87f168bb6a1bf93b244e383b010bffcb23865d895ac4"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810268 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a13d75e73c7a71c393b87f168bb6a1bf93b244e383b010bffcb23865d895ac4"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810276 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerStarted","Data":"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810288 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" event={"ID":"681f8c15-9cbf-4416-83c4-36429c38a18d","Type":"ContainerDied","Data":"186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810295 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="186c656d2ca647d5203c49066d02b34873109096006cca45fa7d819f8b971bf1"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810307 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nwrww"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kngsq" event={"ID":"686d3fe6-8cc9-4013-a5f3-55fe41ac840e","Type":"ContainerDied","Data":"3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810334 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ce3b0ab74af7cb73921966b700406748ad8e7caeb98f8354f43b97238d35964"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810344 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9594-account-delete-mxv78" event={"ID":"4198a604-1b99-4822-9377-afaaef616d15","Type":"ContainerDied","Data":"469eeeba4c4c6773e03905fab4594c164e4caa8017190470fb7f3ea24e807fc1"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810359 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-106f-account-create-frpqk"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810373 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6547ead1-44e3-45f3-a668-fff64776f1f6","Type":"ContainerDied","Data":"6b8a6399c8657f3408bf3023cf043092593eaa0c540354e2d3c57d44064ccfb2"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810381 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b8a6399c8657f3408bf3023cf043092593eaa0c540354e2d3c57d44064ccfb2"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810391 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone106f-account-delete-kh474"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810412 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance0aea-account-delete-zkcjp" event={"ID":"a650c48d-76ae-45a3-b79c-e6e014009769","Type":"ContainerDied","Data":"dd9fe35559aea93247f10beae16eb3e08363367015c9318222e2de501012dd9e"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810427 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd9fe35559aea93247f10beae16eb3e08363367015c9318222e2de501012dd9e"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810437 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic1ef-account-delete-62l5c" event={"ID":"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5","Type":"ContainerDied","Data":"d584375736a0b9b1bb5ab0e43ef15ea58c0ab9847c889a51251a6379833bea5f"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810448 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-106f-account-create-frpqk"]
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810465 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerDied","Data":"71bc762a04da349305dfb7aba1fc1915351db7ac6e3d40db700ab69ee2492adf"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810478 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"155bac7b-055b-4bca-a155-f5ab13dacf80","Type":"ContainerDied","Data":"c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810487 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c47f7ae7f3a3f3d5783c5d4e9eba68e68c969be9cb8cde5977881377e12ea501"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810501 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerDied","Data":"683a9cea3704f28dee554ad7d0a5fd46617595e708f08884aeeafa501eeeb131"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810516 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-58db4df78-q9st4" event={"ID":"35ad4758-5e5f-4ba3-84be-a4ae754e9048","Type":"ContainerDied","Data":"f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810524 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810532 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerDied","Data":"a11a2c348b41af6175d5a702fc5ef23abb0b4539c6157c033c27932942b63f91"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810543 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerDied","Data":"702ba6b98e592df611cab939fccc402c5b2b70dd82e3db9c234d04ce1db93139"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810573 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerDied","Data":"97c93f28e63efe77b049abc94e9494220f89aeb5a45b0d485d1782988daeb773"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810597 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6d5aeac4-dc04-4a3e-93cf-16b00842df35","Type":"ContainerDied","Data":"ff922eab8e63389a4e1886bd85210654c942dd59ae88953ced077b10dd8d3f38"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810605 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff922eab8e63389a4e1886bd85210654c942dd59ae88953ced077b10dd8d3f38"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810614 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerDied","Data":"7893281dd7f6e155dae82597172d11e93702c9a53318c8564135c0043e49216e"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810627 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedee-account-delete-tcdxx" event={"ID":"e86e6033-7336-4deb-bf90-8c4941d56542","Type":"ContainerStarted","Data":"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810637 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron4548-account-delete-zmhl4" event={"ID":"83b5887d-7cec-4aef-8d5c-041346dbc89f","Type":"ContainerDied","Data":"644e9ebb368a3aa467dad7915d9d3f3bbe1d1f79dd2a2f5ca01718aad4eb4385"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810648 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron4548-account-delete-zmhl4" event={"ID":"83b5887d-7cec-4aef-8d5c-041346dbc89f","Type":"ContainerDied","Data":"b8d4462cf3e0bee8ace9b8c43fdd447665858e5c3fba54e411218dbfb309ff21"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810656 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8d4462cf3e0bee8ace9b8c43fdd447665858e5c3fba54e411218dbfb309ff21"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810665 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerStarted","Data":"9d8741c1eed1c55db0932c406767602466524f85522227eda0fdd1d95c35f2e1"}
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810757 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone106f-account-delete-kh474"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.809207 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-57cd5fc748-p2rdp" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.159:9311/healthcheck\": dial tcp 10.217.0.159:9311: connect: connection refused"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.810999 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-7f5c54b599-s8jwr" podUID="254b0c39-e3af-4a48-a954-5ff334d36670" containerName="keystone-api" containerID="cri-o://9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18" gracePeriod=30
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.828996 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.871593 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerName="kube-state-metrics" probeResult="failure" output="Get \"https://10.217.0.197:8081/readyz\": dial tcp 10.217.0.197:8081: connect: connection reset by peer"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.883075 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68de214d-52d3-4916-9c7f-720fc3ebc0f5" path="/var/lib/kubelet/pods/68de214d-52d3-4916-9c7f-720fc3ebc0f5/volumes"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.883748 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76c27ee0-08c0-41d9-89a6-30d73137b03f" path="/var/lib/kubelet/pods/76c27ee0-08c0-41d9-89a6-30d73137b03f/volumes"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.884235 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89176eaf-8985-42d1-a4b1-8fbc918beb38" path="/var/lib/kubelet/pods/89176eaf-8985-42d1-a4b1-8fbc918beb38/volumes"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.884971 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cecfad1a-8dce-4c88-8435-2ef29c6d7fb9" path="/var/lib/kubelet/pods/cecfad1a-8dce-4c88-8435-2ef29c6d7fb9/volumes"
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.911179 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data\") pod \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") "
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.911510 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle\") pod \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") "
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.911617 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsfcz\" (UniqueName: \"kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz\") pod \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\" (UID: \"dfe2dfd3-8b6d-466a-92f5-68e649d31298\") "
Oct 03 15:53:07 crc kubenswrapper[5081]: I1003 15:53:07.929308 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz" (OuterVolumeSpecName: "kube-api-access-tsfcz") pod "dfe2dfd3-8b6d-466a-92f5-68e649d31298" (UID: "dfe2dfd3-8b6d-466a-92f5-68e649d31298"). InnerVolumeSpecName "kube-api-access-tsfcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.014462 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsfcz\" (UniqueName: \"kubernetes.io/projected/dfe2dfd3-8b6d-466a-92f5-68e649d31298-kube-api-access-tsfcz\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.048873 5081 scope.go:117] "RemoveContainer" containerID="b6fd92f8baa9b25fd91643a7fa2bd52ae9a8dc5be12a4823bad3dc1941b76522"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.051897 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.201:3000/\": dial tcp 10.217.0.201:3000: connect: connection refused"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.089672 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data" (OuterVolumeSpecName: "config-data") pod "dfe2dfd3-8b6d-466a-92f5-68e649d31298" (UID: "dfe2dfd3-8b6d-466a-92f5-68e649d31298"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.111599 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfe2dfd3-8b6d-466a-92f5-68e649d31298" (UID: "dfe2dfd3-8b6d-466a-92f5-68e649d31298"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.130816 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.130855 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe2dfd3-8b6d-466a-92f5-68e649d31298-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.235356 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="galera" containerID="cri-o://34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" gracePeriod=30
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.244310 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.244347 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.244365 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.244377 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-7cc85979bf-f5kj9"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.262357 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.274362 5081 scope.go:117] "RemoveContainer" containerID="7ad6062447176d359892df549a0f432d03509731f48fa17e1a53bd11f1b7042c"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.301079 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.327238 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.338246 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-kngsq_686d3fe6-8cc9-4013-a5f3-55fe41ac840e/ovn-controller/0.log"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.338332 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.340686 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs\") pod \"155bac7b-055b-4bca-a155-f5ab13dacf80\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.340793 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle\") pod \"155bac7b-055b-4bca-a155-f5ab13dacf80\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.341111 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs\") pod \"155bac7b-055b-4bca-a155-f5ab13dacf80\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.341161 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data\") pod \"155bac7b-055b-4bca-a155-f5ab13dacf80\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.341298 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmdhx\" (UniqueName: \"kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx\") pod \"155bac7b-055b-4bca-a155-f5ab13dacf80\" (UID: \"155bac7b-055b-4bca-a155-f5ab13dacf80\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.360468 5081 scope.go:117] "RemoveContainer" containerID="b5c101a43a3295f40820a6e0bc4115d66a17ebbfd93458ef2f3e176a858daf89"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.381876 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx" (OuterVolumeSpecName: "kube-api-access-dmdhx") pod "155bac7b-055b-4bca-a155-f5ab13dacf80" (UID: "155bac7b-055b-4bca-a155-f5ab13dacf80"). InnerVolumeSpecName "kube-api-access-dmdhx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.382106 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance0aea-account-delete-zkcjp"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.409752 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused"
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.419830 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.421502 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron4548-account-delete-zmhl4"
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.422273 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.423802 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.424073 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.424117 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.449007 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58db4df78-q9st4"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.459619 5081 scope.go:117] "RemoveContainer" containerID="5f6d5178719cb62c16b047a9e06ac48b70916f8257a72246b0bf985d185298bd"
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.460113 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461076 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461133 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs\") pod \"681f8c15-9cbf-4416-83c4-36429c38a18d\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461172 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461216 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gqdd\" (UniqueName: \"kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd\") pod \"a650c48d-76ae-45a3-b79c-e6e014009769\" (UID: \"a650c48d-76ae-45a3-b79c-e6e014009769\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461247 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom\") pod \"681f8c15-9cbf-4416-83c4-36429c38a18d\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461318 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461383 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data\") pod \"681f8c15-9cbf-4416-83c4-36429c38a18d\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461398 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96lpm\" (UniqueName: \"kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461422 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461462 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle\") pod \"681f8c15-9cbf-4416-83c4-36429c38a18d\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461486 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461519 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461579 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461604 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjlvq\" (UniqueName: \"kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461652 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461683 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461728 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461762 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461833 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgfnx\" (UniqueName: \"kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx\") pod \"681f8c15-9cbf-4416-83c4-36429c38a18d\" (UID: \"681f8c15-9cbf-4416-83c4-36429c38a18d\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461856 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn\") pod \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\" (UID: \"686d3fe6-8cc9-4013-a5f3-55fe41ac840e\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.461888 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom\") pod \"6547ead1-44e3-45f3-a668-fff64776f1f6\" (UID: \"6547ead1-44e3-45f3-a668-fff64776f1f6\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.463064 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmdhx\" (UniqueName: \"kubernetes.io/projected/155bac7b-055b-4bca-a155-f5ab13dacf80-kube-api-access-dmdhx\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.475758 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs" (OuterVolumeSpecName: "logs") pod "681f8c15-9cbf-4416-83c4-36429c38a18d" (UID: "681f8c15-9cbf-4416-83c4-36429c38a18d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.475838 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.480657 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs" (OuterVolumeSpecName: "logs") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.481886 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts" (OuterVolumeSpecName: "scripts") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.489996 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.490095 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.497363 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.509023 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run" (OuterVolumeSpecName: "var-run") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.514664 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts" (OuterVolumeSpecName: "scripts") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.517426 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "681f8c15-9cbf-4416-83c4-36429c38a18d" (UID: "681f8c15-9cbf-4416-83c4-36429c38a18d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.523642 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data" (OuterVolumeSpecName: "config-data") pod "155bac7b-055b-4bca-a155-f5ab13dacf80" (UID: "155bac7b-055b-4bca-a155-f5ab13dacf80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.524956 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx" (OuterVolumeSpecName: "kube-api-access-bgfnx") pod "681f8c15-9cbf-4416-83c4-36429c38a18d" (UID: "681f8c15-9cbf-4416-83c4-36429c38a18d"). InnerVolumeSpecName "kube-api-access-bgfnx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.533928 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm" (OuterVolumeSpecName: "kube-api-access-96lpm") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "kube-api-access-96lpm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.537068 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "155bac7b-055b-4bca-a155-f5ab13dacf80" (UID: "155bac7b-055b-4bca-a155-f5ab13dacf80"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.537341 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd" (OuterVolumeSpecName: "kube-api-access-6gqdd") pod "a650c48d-76ae-45a3-b79c-e6e014009769" (UID: "a650c48d-76ae-45a3-b79c-e6e014009769"). InnerVolumeSpecName "kube-api-access-6gqdd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.537740 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq" (OuterVolumeSpecName: "kube-api-access-sjlvq") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "kube-api-access-sjlvq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.537957 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic1ef-account-delete-62l5c"
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.583742 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.585053 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.587651 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.587824 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle\") pod \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.587917 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs\") pod \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.587958 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp9ss\" (UniqueName: \"kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss\") pod \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.588000 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data\") pod \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.588071 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzlj2\" (UniqueName: \"kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2\") pod \"83b5887d-7cec-4aef-8d5c-041346dbc89f\" (UID: \"83b5887d-7cec-4aef-8d5c-041346dbc89f\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.594613 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs\") pod \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\" (UID: \"6d5aeac4-dc04-4a3e-93cf-16b00842df35\") "
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597072 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597131 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597149 5081 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597164 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6547ead1-44e3-45f3-a668-fff64776f1f6-logs\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597177 5081 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597201 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgfnx\" (UniqueName: \"kubernetes.io/projected/681f8c15-9cbf-4416-83c4-36429c38a18d-kube-api-access-bgfnx\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597216 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597230 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/681f8c15-9cbf-4416-83c4-36429c38a18d-logs\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597245 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gqdd\" (UniqueName: \"kubernetes.io/projected/a650c48d-76ae-45a3-b79c-e6e014009769-kube-api-access-6gqdd\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597265 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597280 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597295 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597315 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96lpm\" (UniqueName: \"kubernetes.io/projected/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-kube-api-access-96lpm\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597331 5081 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName:
\"kubernetes.io/host-path/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597343 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6547ead1-44e3-45f3-a668-fff64776f1f6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597357 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597372 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjlvq\" (UniqueName: \"kubernetes.io/projected/6547ead1-44e3-45f3-a668-fff64776f1f6-kube-api-access-sjlvq\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.597615 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs" (OuterVolumeSpecName: "logs") pod "6d5aeac4-dc04-4a3e-93cf-16b00842df35" (UID: "6d5aeac4-dc04-4a3e-93cf-16b00842df35"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.597725 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.597786 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerName="nova-cell1-conductor-conductor" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.609098 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.622834 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss" (OuterVolumeSpecName: "kube-api-access-tp9ss") pod "6d5aeac4-dc04-4a3e-93cf-16b00842df35" (UID: "6d5aeac4-dc04-4a3e-93cf-16b00842df35"). InnerVolumeSpecName "kube-api-access-tp9ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.634542 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone106f-account-delete-kh474" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.634790 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.663283 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.671849 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.677398 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "155bac7b-055b-4bca-a155-f5ab13dacf80" (UID: "155bac7b-055b-4bca-a155-f5ab13dacf80"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.678993 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "681f8c15-9cbf-4416-83c4-36429c38a18d" (UID: "681f8c15-9cbf-4416-83c4-36429c38a18d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.698297 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701157 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701227 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701269 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701358 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701413 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701456 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701503 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701538 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2" (OuterVolumeSpecName: "kube-api-access-kzlj2") pod "83b5887d-7cec-4aef-8d5c-041346dbc89f" (UID: "83b5887d-7cec-4aef-8d5c-041346dbc89f"). InnerVolumeSpecName "kube-api-access-kzlj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701657 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701702 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vptqx\" (UniqueName: \"kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx\") pod \"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5\" (UID: \"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701722 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701754 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701797 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701819 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701843 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701870 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-9csvt\" (UniqueName: \"kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt\") pod \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\" (UID: \"ed01e164-93b5-47c7-9ec0-7a00d6942c66\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.701908 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5mr8\" (UniqueName: \"kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8\") pod \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\" (UID: \"35ad4758-5e5f-4ba3-84be-a4ae754e9048\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702291 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8341a6f-4783-4bf6-916e-ac655208ba45","Type":"ContainerDied","Data":"34b532534696cfce7fa57360a28da1b4d03320d22387a0dc84f4ff85a37755ca"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702435 5081 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702452 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702467 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp9ss\" (UniqueName: \"kubernetes.io/projected/6d5aeac4-dc04-4a3e-93cf-16b00842df35-kube-api-access-tp9ss\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702480 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzlj2\" (UniqueName: \"kubernetes.io/projected/83b5887d-7cec-4aef-8d5c-041346dbc89f-kube-api-access-kzlj2\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702494 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702505 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d5aeac4-dc04-4a3e-93cf-16b00842df35-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702451 5081 scope.go:117] "RemoveContainer" containerID="7893281dd7f6e155dae82597172d11e93702c9a53318c8564135c0043e49216e" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.702591 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.714178 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs" (OuterVolumeSpecName: "logs") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.715127 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement9594-account-delete-mxv78" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.717979 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.718151 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 03 15:53:08 crc kubenswrapper[5081]: E1003 15:53:08.718216 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data podName:7060c6c8-bbe8-47ae-8ef2-4358291dbb61 nodeName:}" failed. No retries permitted until 2025-10-03 15:53:16.718194131 +0000 UTC m=+1515.683750744 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data") pod "rabbitmq-server-0" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61") : configmap "rabbitmq-config-data" not found Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.718324 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs" (OuterVolumeSpecName: "logs") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.750767 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts" (OuterVolumeSpecName: "scripts") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.758198 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt" (OuterVolumeSpecName: "kube-api-access-9csvt") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "kube-api-access-9csvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.785716 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.785854 5081 generic.go:334] "Generic (PLEG): container finished" podID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerID="c0cc61bb578e6b82b21ec38b2933e461e6db44834efbf165d4d23566ac8055d0" exitCode=0 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.785878 5081 generic.go:334] "Generic (PLEG): container finished" podID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerID="15cacaf50eb36833c76259d52ef81d26141a2d02ee727e2efd6a2a4ccbfc0c93" exitCode=2 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.785888 5081 generic.go:334] "Generic (PLEG): container finished" podID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerID="e89a72236f59fd3fd90bec6d4e3cb013c31ae0d3b3cd93b35b04de7bbc1b544a" exitCode=0 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.785922 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.786080 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerDied","Data":"c0cc61bb578e6b82b21ec38b2933e461e6db44834efbf165d4d23566ac8055d0"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.786107 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerDied","Data":"15cacaf50eb36833c76259d52ef81d26141a2d02ee727e2efd6a2a4ccbfc0c93"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.786118 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerDied","Data":"e89a72236f59fd3fd90bec6d4e3cb013c31ae0d3b3cd93b35b04de7bbc1b544a"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.786322 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx" (OuterVolumeSpecName: "kube-api-access-vptqx") pod "6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" (UID: "6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5"). InnerVolumeSpecName "kube-api-access-vptqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.795007 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed01e164-93b5-47c7-9ec0-7a00d6942c66","Type":"ContainerDied","Data":"3ed5e97bc2df7e3804d348484f45d7f6b088fdb7d7da8f6ec6a969614d4b835b"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.795221 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.797782 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts" (OuterVolumeSpecName: "scripts") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.797900 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data" (OuterVolumeSpecName: "config-data") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.798873 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.799227 5081 scope.go:117] "RemoveContainer" containerID="8d9622a52a15f0f59848d03a12a790b4b65eb968515da8ea26ad4197241e283b" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.799411 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8" (OuterVolumeSpecName: "kube-api-access-l5mr8") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "kube-api-access-l5mr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.800378 5081 generic.go:334] "Generic (PLEG): container finished" podID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerID="375aa21783d8942e6463ead5ec2f108cd31f251a593b6147df3ae5e0bcca62a7" exitCode=2 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.800901 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b12ba3f3-51d4-4c3d-9677-d0a632be0974","Type":"ContainerDied","Data":"375aa21783d8942e6463ead5ec2f108cd31f251a593b6147df3ae5e0bcca62a7"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.800971 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b12ba3f3-51d4-4c3d-9677-d0a632be0974","Type":"ContainerDied","Data":"bc3c2b9a3a6daa1cd43fdcfd4af1c20e96c47e13c21bc29f6870194bd99c6762"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.801271 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc3c2b9a3a6daa1cd43fdcfd4af1c20e96c47e13c21bc29f6870194bd99c6762" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.803520 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gcrq\" (UniqueName: \"kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.803552 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.803978 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804027 5081 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804056 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjhbl\" (UniqueName: \"kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804100 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804144 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804274 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jcxw\" (UniqueName: \"kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw\") pod \"4198a604-1b99-4822-9377-afaaef616d15\" (UID: \"4198a604-1b99-4822-9377-afaaef616d15\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804628 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804692 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804769 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804857 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804893 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804937 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"b8341a6f-4783-4bf6-916e-ac655208ba45\" (UID: \"b8341a6f-4783-4bf6-916e-ac655208ba45\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.804962 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs\") pod \"074c619e-3cb7-417e-8192-9d13725cdde5\" (UID: \"074c619e-3cb7-417e-8192-9d13725cdde5\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.805793 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.805829 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.805843 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed01e164-93b5-47c7-9ec0-7a00d6942c66-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806835 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806855 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vptqx\" (UniqueName: \"kubernetes.io/projected/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5-kube-api-access-vptqx\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806879 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806906 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35ad4758-5e5f-4ba3-84be-a4ae754e9048-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806915 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806924 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9csvt\" (UniqueName: \"kubernetes.io/projected/ed01e164-93b5-47c7-9ec0-7a00d6942c66-kube-api-access-9csvt\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.806932 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5mr8\" (UniqueName: \"kubernetes.io/projected/35ad4758-5e5f-4ba3-84be-a4ae754e9048-kube-api-access-l5mr8\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.814445 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs" (OuterVolumeSpecName: "logs") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.815514 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.815751 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.820590 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9594-account-delete-mxv78" event={"ID":"4198a604-1b99-4822-9377-afaaef616d15","Type":"ContainerDied","Data":"5a142b679a6b1ba417969d37e853604bbe3e8e9150c8a0ee39fa3a5f71c57e71"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.826148 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs" (OuterVolumeSpecName: "logs") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.828442 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement9594-account-delete-mxv78" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.837203 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.837708 5081 generic.go:334] "Generic (PLEG): container finished" podID="7abf152b-a4ec-4114-bb59-491582952b05" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" exitCode=0 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.837777 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.837786 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7abf152b-a4ec-4114-bb59-491582952b05","Type":"ContainerDied","Data":"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.837812 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7abf152b-a4ec-4114-bb59-491582952b05","Type":"ContainerDied","Data":"2ce8e8a7fea6b491c4bd829ce12a91127cbdbfbe19f13602cc66fdab14e50c0b"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.844602 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw" (OuterVolumeSpecName: "kube-api-access-8jcxw") pod "4198a604-1b99-4822-9377-afaaef616d15" (UID: "4198a604-1b99-4822-9377-afaaef616d15"). InnerVolumeSpecName "kube-api-access-8jcxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.852755 5081 generic.go:334] "Generic (PLEG): container finished" podID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerID="de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d" exitCode=0 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.852835 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.852954 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerDied","Data":"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.853010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1","Type":"ContainerDied","Data":"21d95d68569656a54923a26532efc674166d6ab27e3d18d4627f4a5fcb1dcb60"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.852771 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.858704 5081 scope.go:117] "RemoveContainer" containerID="71bc762a04da349305dfb7aba1fc1915351db7ac6e3d40db700ab69ee2492adf" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.865200 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerStarted","Data":"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.865462 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" containerID="cri-o://dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335" gracePeriod=30 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.865802 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.865828 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.865868 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" containerID="cri-o://1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730" gracePeriod=30 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.880827 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.882822 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq" (OuterVolumeSpecName: "kube-api-access-9gcrq") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "kube-api-access-9gcrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.895943 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic1ef-account-delete-62l5c" event={"ID":"6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5","Type":"ContainerDied","Data":"64462caa445d3495647047cefb839dd94ebf0cd1aeae1ec5260082beed69fb7c"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.896353 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic1ef-account-delete-62l5c" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.902878 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts" (OuterVolumeSpecName: "scripts") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908463 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908581 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908626 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle\") pod \"7abf152b-a4ec-4114-bb59-491582952b05\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908680 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908718 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config\") pod \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908822 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49mxf\" (UniqueName: \"kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf\") pod 
\"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.908918 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909032 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909069 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data\") pod \"7abf152b-a4ec-4114-bb59-491582952b05\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909093 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909113 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909156 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kdnv\" (UniqueName: \"kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv\") pod \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909231 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909276 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ws5w\" (UniqueName: \"kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w\") pod \"7abf152b-a4ec-4114-bb59-491582952b05\" (UID: \"7abf152b-a4ec-4114-bb59-491582952b05\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909330 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: 
\"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909346 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909370 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909390 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909408 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs\") pod \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909438 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhbkv\" (UniqueName: \"kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv\") pod \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\" (UID: \"49cb0be2-f988-48bc-afd9-bb4bd348de1f\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909457 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle\") pod \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\" (UID: \"14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909474 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle\") pod \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\" (UID: \"b12ba3f3-51d4-4c3d-9677-d0a632be0974\") " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.909988 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910003 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910013 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jcxw\" (UniqueName: \"kubernetes.io/projected/4198a604-1b99-4822-9377-afaaef616d15-kube-api-access-8jcxw\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910023 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/074c619e-3cb7-417e-8192-9d13725cdde5-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910042 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910054 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910063 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gcrq\" (UniqueName: \"kubernetes.io/projected/b8341a6f-4783-4bf6-916e-ac655208ba45-kube-api-access-9gcrq\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.910071 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8341a6f-4783-4bf6-916e-ac655208ba45-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.915120 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.921501 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.927386 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.929413 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.930433 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs" (OuterVolumeSpecName: "logs") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.931613 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl" (OuterVolumeSpecName: "kube-api-access-wjhbl") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "kube-api-access-wjhbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.932724 5081 generic.go:334] "Generic (PLEG): container finished" podID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerID="65b717a24fa17c44796587db00e37ce3290f0faba7d3338dd5922b855e7ae029" exitCode=143 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.933095 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerDied","Data":"65b717a24fa17c44796587db00e37ce3290f0faba7d3338dd5922b855e7ae029"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.949983 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w" (OuterVolumeSpecName: "kube-api-access-2ws5w") pod "7abf152b-a4ec-4114-bb59-491582952b05" (UID: "7abf152b-a4ec-4114-bb59-491582952b05"). InnerVolumeSpecName "kube-api-access-2ws5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.958432 5081 generic.go:334] "Generic (PLEG): container finished" podID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerID="150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3" exitCode=0 Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.958808 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerDied","Data":"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.958912 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57cd5fc748-p2rdp" event={"ID":"49cb0be2-f988-48bc-afd9-bb4bd348de1f","Type":"ContainerDied","Data":"104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.959161 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57cd5fc748-p2rdp" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.963538 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "155bac7b-055b-4bca-a155-f5ab13dacf80" (UID: "155bac7b-055b-4bca-a155-f5ab13dacf80"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.978302 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv" (OuterVolumeSpecName: "kube-api-access-9kdnv") pod "b12ba3f3-51d4-4c3d-9677-d0a632be0974" (UID: "b12ba3f3-51d4-4c3d-9677-d0a632be0974"). InnerVolumeSpecName "kube-api-access-9kdnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.978409 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv" (OuterVolumeSpecName: "kube-api-access-hhbkv") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "kube-api-access-hhbkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.978412 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf" (OuterVolumeSpecName: "kube-api-access-49mxf") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "kube-api-access-49mxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.978627 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets" (OuterVolumeSpecName: "secrets") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.979807 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6547ead1-44e3-45f3-a668-fff64776f1f6" (UID: "6547ead1-44e3-45f3-a668-fff64776f1f6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.983300 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985485 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-58db4df78-q9st4" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985493 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985593 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"074c619e-3cb7-417e-8192-9d13725cdde5","Type":"ContainerDied","Data":"3f6f931d1334ffcab0850c9c051c18e104f77cb04c5b292b4915c95229ad8655"} Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985665 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985730 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985777 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985828 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance0aea-account-delete-zkcjp" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985874 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f7bd66f74-sbcrq" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985941 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kngsq" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.985976 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron4548-account-delete-zmhl4" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.986146 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 15:53:08 crc kubenswrapper[5081]: I1003 15:53:08.986177 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone106f-account-delete-kh474" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017290 5081 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017324 5081 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017333 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kdnv\" (UniqueName: \"kubernetes.io/projected/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-api-access-9kdnv\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017342 5081 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/155bac7b-055b-4bca-a155-f5ab13dacf80-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017354 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6547ead1-44e3-45f3-a668-fff64776f1f6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017362 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49cb0be2-f988-48bc-afd9-bb4bd348de1f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017370 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ws5w\" (UniqueName: \"kubernetes.io/projected/7abf152b-a4ec-4114-bb59-491582952b05-kube-api-access-2ws5w\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017379 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017389 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhbkv\" (UniqueName: 
\"kubernetes.io/projected/49cb0be2-f988-48bc-afd9-bb4bd348de1f-kube-api-access-hhbkv\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017399 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017408 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjhbl\" (UniqueName: \"kubernetes.io/projected/074c619e-3cb7-417e-8192-9d13725cdde5-kube-api-access-wjhbl\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017416 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017425 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49mxf\" (UniqueName: \"kubernetes.io/projected/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kube-api-access-49mxf\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.017433 5081 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.033018 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5df6654c96-f7vp5" podStartSLOduration=8.032991992 podStartE2EDuration="8.032991992s" podCreationTimestamp="2025-10-03 15:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 15:53:08.98005472 +0000 UTC m=+1507.945611333" watchObservedRunningTime="2025-10-03 15:53:09.032991992 +0000 UTC m=+1507.998548605" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.063390 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.089154 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.116839 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "686d3fe6-8cc9-4013-a5f3-55fe41ac840e" (UID: "686d3fe6-8cc9-4013-a5f3-55fe41ac840e"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.120929 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data" (OuterVolumeSpecName: "config-data") pod "681f8c15-9cbf-4416-83c4-36429c38a18d" (UID: "681f8c15-9cbf-4416-83c4-36429c38a18d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.122854 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/686d3fe6-8cc9-4013-a5f3-55fe41ac840e-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.122905 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.122919 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.122933 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681f8c15-9cbf-4416-83c4-36429c38a18d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.188771 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data" (OuterVolumeSpecName: "config-data") pod "6d5aeac4-dc04-4a3e-93cf-16b00842df35" (UID: "6d5aeac4-dc04-4a3e-93cf-16b00842df35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.199629 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data" (OuterVolumeSpecName: "config-data") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.224161 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.224235 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.260692 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.263355 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d5aeac4-dc04-4a3e-93cf-16b00842df35" (UID: "6d5aeac4-dc04-4a3e-93cf-16b00842df35"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.269548 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data" (OuterVolumeSpecName: "config-data") pod "7abf152b-a4ec-4114-bb59-491582952b05" (UID: "7abf152b-a4ec-4114-bb59-491582952b05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.308867 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.323822 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.326831 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.331804 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.331930 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.332009 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.332141 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: E1003 15:53:09.329539 5081 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:09 crc kubenswrapper[5081]: E1003 15:53:09.332347 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data podName:5344c615-93ae-4a4a-95b1-3bbe3327f42e nodeName:}" failed. No retries permitted until 2025-10-03 15:53:17.332323682 +0000 UTC m=+1516.297880295 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data") pod "rabbitmq-cell1-server-0" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e") : configmap "rabbitmq-cell1-config-data" not found Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.363713 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.393983 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data" (OuterVolumeSpecName: "config-data") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.403398 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.435896 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.435938 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.435951 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.458961 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.462823 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7abf152b-a4ec-4114-bb59-491582952b05" (UID: "7abf152b-a4ec-4114-bb59-491582952b05"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.464433 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "b12ba3f3-51d4-4c3d-9677-d0a632be0974" (UID: "b12ba3f3-51d4-4c3d-9677-d0a632be0974"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.478624 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b12ba3f3-51d4-4c3d-9677-d0a632be0974" (UID: "b12ba3f3-51d4-4c3d-9677-d0a632be0974"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.509868 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.531574 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546307 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf152b-a4ec-4114-bb59-491582952b05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546379 5081 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546392 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546404 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546415 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.546423 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.548706 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.593111 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6d5aeac4-dc04-4a3e-93cf-16b00842df35" (UID: "6d5aeac4-dc04-4a3e-93cf-16b00842df35"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.623745 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data" (OuterVolumeSpecName: "config-data") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.663355 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.664619 5081 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d5aeac4-dc04-4a3e-93cf-16b00842df35-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.664696 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.664706 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.664716 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.675510 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data" (OuterVolumeSpecName: "config-data") pod "b8341a6f-4783-4bf6-916e-ac655208ba45" (UID: "b8341a6f-4783-4bf6-916e-ac655208ba45"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.679766 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.691925 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "074c619e-3cb7-417e-8192-9d13725cdde5" (UID: "074c619e-3cb7-417e-8192-9d13725cdde5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.702600 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ed01e164-93b5-47c7-9ec0-7a00d6942c66" (UID: "ed01e164-93b5-47c7-9ec0-7a00d6942c66"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.726073 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" (UID: "14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.749969 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "b12ba3f3-51d4-4c3d-9677-d0a632be0974" (UID: "b12ba3f3-51d4-4c3d-9677-d0a632be0974"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.757994 5081 scope.go:117] "RemoveContainer" containerID="20c238aeaa3c71e4c9d06c0d2978a9a624c469b91b5eb72c44d08c1bce7fd2a9" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.766916 5081 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.766957 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8341a6f-4783-4bf6-916e-ac655208ba45-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.770202 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c619e-3cb7-417e-8192-9d13725cdde5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.770272 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.770287 5081 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12ba3f3-51d4-4c3d-9677-d0a632be0974-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.770303 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed01e164-93b5-47c7-9ec0-7a00d6942c66-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.775059 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"] Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.803352 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-5f7bd66f74-sbcrq"] Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.809968 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.821170 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data" (OuterVolumeSpecName: "config-data") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.853996 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.879214 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="262952b6-434e-4da7-8cdb-b7073a71f13c" path="/var/lib/kubelet/pods/262952b6-434e-4da7-8cdb-b7073a71f13c/volumes" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.879383 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.880038 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" path="/var/lib/kubelet/pods/681f8c15-9cbf-4416-83c4-36429c38a18d/volumes" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.911862 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "49cb0be2-f988-48bc-afd9-bb4bd348de1f" (UID: "49cb0be2-f988-48bc-afd9-bb4bd348de1f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.953008 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbc5b56e-83ed-460e-a57d-2c51df24c5dc" path="/var/lib/kubelet/pods/cbc5b56e-83ed-460e-a57d-2c51df24c5dc/volumes" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.960062 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.986926 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs\") pod \"ca0c9b41-c081-4a81-90f2-730e16c7d347\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.986972 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msrt7\" (UniqueName: \"kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7\") pod \"ca0c9b41-c081-4a81-90f2-730e16c7d347\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.987084 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle\") pod \"ca0c9b41-c081-4a81-90f2-730e16c7d347\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.987146 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config\") pod \"ca0c9b41-c081-4a81-90f2-730e16c7d347\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.987200 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data\") pod \"ca0c9b41-c081-4a81-90f2-730e16c7d347\" (UID: \"ca0c9b41-c081-4a81-90f2-730e16c7d347\") " Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.987874 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49cb0be2-f988-48bc-afd9-bb4bd348de1f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.987891 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.993260 5081 scope.go:117] "RemoveContainer" containerID="469eeeba4c4c6773e03905fab4594c164e4caa8017190470fb7f3ea24e807fc1" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.995180 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "ca0c9b41-c081-4a81-90f2-730e16c7d347" (UID: "ca0c9b41-c081-4a81-90f2-730e16c7d347"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:09 crc kubenswrapper[5081]: I1003 15:53:09.998413 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data" (OuterVolumeSpecName: "config-data") pod "ca0c9b41-c081-4a81-90f2-730e16c7d347" (UID: "ca0c9b41-c081-4a81-90f2-730e16c7d347"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.017733 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7" (OuterVolumeSpecName: "kube-api-access-msrt7") pod "ca0c9b41-c081-4a81-90f2-730e16c7d347" (UID: "ca0c9b41-c081-4a81-90f2-730e16c7d347"). InnerVolumeSpecName "kube-api-access-msrt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.018705 5081 generic.go:334] "Generic (PLEG): container finished" podID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerID="dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335" exitCode=143 Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.034021 5081 generic.go:334] "Generic (PLEG): container finished" podID="ca0c9b41-c081-4a81-90f2-730e16c7d347" containerID="e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727" exitCode=0 Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.034149 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.037602 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.050661 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca0c9b41-c081-4a81-90f2-730e16c7d347" (UID: "ca0c9b41-c081-4a81-90f2-730e16c7d347"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.078811 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "35ad4758-5e5f-4ba3-84be-a4ae754e9048" (UID: "35ad4758-5e5f-4ba3-84be-a4ae754e9048"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.091703 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "ca0c9b41-c081-4a81-90f2-730e16c7d347" (UID: "ca0c9b41-c081-4a81-90f2-730e16c7d347"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.097647 5081 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098363 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca0c9b41-c081-4a81-90f2-730e16c7d347-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098778 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098827 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone106f-account-delete-kh474"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098841 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerDied","Data":"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335"} Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098868 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone106f-account-delete-kh474"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098882 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098896 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098906 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098917 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance0aea-account-delete-zkcjp"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098929 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ca0c9b41-c081-4a81-90f2-730e16c7d347","Type":"ContainerDied","Data":"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727"} Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.098944 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ca0c9b41-c081-4a81-90f2-730e16c7d347","Type":"ContainerDied","Data":"c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b"} Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.099532 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/35ad4758-5e5f-4ba3-84be-a4ae754e9048-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.099590 5081 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.099606 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msrt7\" (UniqueName: \"kubernetes.io/projected/ca0c9b41-c081-4a81-90f2-730e16c7d347-kube-api-access-msrt7\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.099624 5081 reconciler_common.go:293] 
"Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca0c9b41-c081-4a81-90f2-730e16c7d347-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.112493 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.114029 5081 scope.go:117] "RemoveContainer" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.122941 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.129647 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement9594-account-delete-mxv78"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.137211 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement9594-account-delete-mxv78"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.142816 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.152216 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron4548-account-delete-zmhl4"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.169056 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.181539 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapic1ef-account-delete-62l5c"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.203083 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kngsq"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.211628 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kngsq"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.229744 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.236987 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.245723 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.252646 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.333855 5081 scope.go:117] "RemoveContainer" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.339235 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1\": container with ID starting with d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1 not found: ID does not exist" containerID="d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.339327 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1"} err="failed to get container status 
\"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1\": rpc error: code = NotFound desc = could not find container \"d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1\": container with ID starting with d3a227e8ca713f0ec849605ef44a614bb9060bbb6b818d239b89f006c2bd5fc1 not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.340121 5081 scope.go:117] "RemoveContainer" containerID="de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.400172 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.418053 5081 scope.go:117] "RemoveContainer" containerID="4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.429626 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.445806 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.459916 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.477771 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.514452 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.517128 5081 scope.go:117] "RemoveContainer" containerID="de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.518765 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d\": container with ID starting with de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d not found: ID does not exist" containerID="de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.518807 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d"} err="failed to get container status \"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d\": rpc error: code = NotFound desc = could not find container \"de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d\": container with ID starting with de21640567fca2f7935033191955f22cf11f5f60189d47fcceb33f73447e493d not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.518836 5081 scope.go:117] "RemoveContainer" containerID="4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.519732 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320\": container with ID starting with 4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320 not found: ID does not exist" containerID="4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.519802 
5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320"} err="failed to get container status \"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320\": rpc error: code = NotFound desc = could not find container \"4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320\": container with ID starting with 4d41a0d57fa560f505039d081fcf0fb0bb1f4d91608ece182936fa31a4588320 not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.519839 5081 scope.go:117] "RemoveContainer" containerID="d584375736a0b9b1bb5ab0e43ef15ea58c0ab9847c889a51251a6379833bea5f" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.524508 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.540497 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.558745 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.568293 5081 scope.go:117] "RemoveContainer" containerID="150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.575674 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.590734 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-58db4df78-q9st4"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.596522 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-58db4df78-q9st4"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.603599 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.610704 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.617241 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.627726 5081 scope.go:117] "RemoveContainer" containerID="8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.635974 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-57cd5fc748-p2rdp"] Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.654266 5081 scope.go:117] "RemoveContainer" containerID="150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.654931 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3\": container with ID starting with 150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3 not found: ID does not exist" containerID="150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.654980 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3"} err="failed to get 
container status \"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3\": rpc error: code = NotFound desc = could not find container \"150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3\": container with ID starting with 150265969f9325026c4e2268ab2f4ce1386eea91d4e287d5537f43f22f7078b3 not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.655011 5081 scope.go:117] "RemoveContainer" containerID="8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.655420 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f\": container with ID starting with 8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f not found: ID does not exist" containerID="8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.655443 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f"} err="failed to get container status \"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f\": rpc error: code = NotFound desc = could not find container \"8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f\": container with ID starting with 8561a1c270b86dfe653cccab78a37a533d8337799e35b4e0003982ff7b6e474f not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.655457 5081 scope.go:117] "RemoveContainer" containerID="a11a2c348b41af6175d5a702fc5ef23abb0b4539c6157c033c27932942b63f91" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.682780 5081 scope.go:117] "RemoveContainer" containerID="a66b52b10331b1c92cc1455a575bb5bad525d4a39ad38db2e4104fb8919818e2" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.712079 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded01e164_93b5_47c7_9ec0_7a00d6942c66.slice/crio-3ed5e97bc2df7e3804d348484f45d7f6b088fdb7d7da8f6ec6a969614d4b835b\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49cb0be2_f988_48bc_afd9_bb4bd348de1f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ad4758_5e5f_4ba3_84be_a4ae754e9048.slice/crio-f594147252eaa981370a57f87e2776304e0af74fd29f9f3d68280dfb42f3df53\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d5aeac4_dc04_4a3e_93cf_16b00842df35.slice/crio-ff922eab8e63389a4e1886bd85210654c942dd59ae88953ced077b10dd8d3f38\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8341a6f_4783_4bf6_916e_ac655208ba45.slice/crio-34b532534696cfce7fa57360a28da1b4d03320d22387a0dc84f4ff85a37755ca\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca0c9b41_c081_4a81_90f2_730e16c7d347.slice/crio-c047b38ab189810d0c64c1322b3968c39310579429c28ad02431445744f4867b\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod074c619e_3cb7_417e_8192_9d13725cdde5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded01e164_93b5_47c7_9ec0_7a00d6942c66.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb12ba3f3_51d4_4c3d_9677_d0a632be0974.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8341a6f_4783_4bf6_916e_ac655208ba45.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ad4758_5e5f_4ba3_84be_a4ae754e9048.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d5aeac4_dc04_4a3e_93cf_16b00842df35.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49cb0be2_f988_48bc_afd9_bb4bd348de1f.slice/crio-104a21cc5931b0e376a6fbe4b797b78ae8bc6426e70ed6f12cb1bdfee5feb1b9\": RecentStats: unable to find data in memory cache]" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.730787 5081 scope.go:117] "RemoveContainer" containerID="e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.767240 5081 scope.go:117] "RemoveContainer" containerID="e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.767889 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727\": container with ID starting with e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727 not found: ID does not exist" containerID="e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727" Oct 03 15:53:10 crc kubenswrapper[5081]: I1003 15:53:10.767930 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727"} err="failed to get container status \"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727\": rpc error: code = NotFound desc = could not find container \"e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727\": container with ID starting with e624e265b0afb7ec50ed1700cfd5f17308920a5719be46d99f603ed7215a7727 not found: ID does not exist" Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.884543 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.894716 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.899812 5081 log.go:32] 
"ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 03 15:53:10 crc kubenswrapper[5081]: E1003 15:53:10.899866 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="galera" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.085262 5081 generic.go:334] "Generic (PLEG): container finished" podID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerID="9764cd3101ebf1f837d65e3b2926078df427b405bf605c5170fe2cd8a77e323b" exitCode=0 Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.085669 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerDied","Data":"9764cd3101ebf1f837d65e3b2926078df427b405bf605c5170fe2cd8a77e323b"} Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.394800 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.547127 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnz78\" (UniqueName: \"kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78\") pod \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.547196 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs\") pod \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.547241 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data\") pod \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.547258 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom\") pod \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.547293 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle\") pod \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\" (UID: \"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.548695 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs" (OuterVolumeSpecName: "logs") pod "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" (UID: 
"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.573225 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78" (OuterVolumeSpecName: "kube-api-access-pnz78") pod "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" (UID: "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19"). InnerVolumeSpecName "kube-api-access-pnz78". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.574826 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" (UID: "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.577767 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f5c54b599-s8jwr" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.617380 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" (UID: "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.658934 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnz78\" (UniqueName: \"kubernetes.io/projected/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-kube-api-access-pnz78\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.658976 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.658986 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.658995 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.671043 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data" (OuterVolumeSpecName: "config-data") pod "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" (UID: "f64376f5-c10d-45cb-a9eb-81d9a4cd8b19"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.757987 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760187 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760278 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760303 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760335 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljcvj\" (UniqueName: \"kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760357 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760388 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760427 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760471 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data\") pod \"254b0c39-e3af-4a48-a954-5ff334d36670\" (UID: \"254b0c39-e3af-4a48-a954-5ff334d36670\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.760804 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.767027 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj" (OuterVolumeSpecName: "kube-api-access-ljcvj") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "kube-api-access-ljcvj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.768952 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.768980 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts" (OuterVolumeSpecName: "scripts") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.771702 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.792910 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data" (OuterVolumeSpecName: "config-data") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.863761 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.864939 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.864976 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.864994 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865012 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865044 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865085 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865119 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865152 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865204 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbxsb\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865222 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: 
\"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865245 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info\") pod \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\" (UID: \"7060c6c8-bbe8-47ae-8ef2-4358291dbb61\") " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865479 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865490 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865498 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865506 5081 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865516 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljcvj\" (UniqueName: \"kubernetes.io/projected/254b0c39-e3af-4a48-a954-5ff334d36670-kube-api-access-ljcvj\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.865527 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.867868 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.886203 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.886230 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.886439 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" path="/var/lib/kubelet/pods/074c619e-3cb7-417e-8192-9d13725cdde5/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.886663 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.889279 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" path="/var/lib/kubelet/pods/14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.890625 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="155bac7b-055b-4bca-a155-f5ab13dacf80" path="/var/lib/kubelet/pods/155bac7b-055b-4bca-a155-f5ab13dacf80/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.893296 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" path="/var/lib/kubelet/pods/35ad4758-5e5f-4ba3-84be-a4ae754e9048/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.897450 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4198a604-1b99-4822-9377-afaaef616d15" path="/var/lib/kubelet/pods/4198a604-1b99-4822-9377-afaaef616d15/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.900961 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" path="/var/lib/kubelet/pods/49cb0be2-f988-48bc-afd9-bb4bd348de1f/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.902195 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" path="/var/lib/kubelet/pods/6547ead1-44e3-45f3-a668-fff64776f1f6/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.904345 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" path="/var/lib/kubelet/pods/686d3fe6-8cc9-4013-a5f3-55fe41ac840e/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.905210 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" path="/var/lib/kubelet/pods/6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.905969 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" path="/var/lib/kubelet/pods/6d5aeac4-dc04-4a3e-93cf-16b00842df35/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.918032 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7abf152b-a4ec-4114-bb59-491582952b05" path="/var/lib/kubelet/pods/7abf152b-a4ec-4114-bb59-491582952b05/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.918969 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b5887d-7cec-4aef-8d5c-041346dbc89f" path="/var/lib/kubelet/pods/83b5887d-7cec-4aef-8d5c-041346dbc89f/volumes" Oct 03 15:53:11 crc 
kubenswrapper[5081]: I1003 15:53:11.919536 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a650c48d-76ae-45a3-b79c-e6e014009769" path="/var/lib/kubelet/pods/a650c48d-76ae-45a3-b79c-e6e014009769/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.920137 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" path="/var/lib/kubelet/pods/b12ba3f3-51d4-4c3d-9677-d0a632be0974/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.922478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info" (OuterVolumeSpecName: "pod-info") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.927867 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.938941 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb" (OuterVolumeSpecName: "kube-api-access-pbxsb") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "kube-api-access-pbxsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.939055 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.939126 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.941319 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "254b0c39-e3af-4a48-a954-5ff334d36670" (UID: "254b0c39-e3af-4a48-a954-5ff334d36670"). InnerVolumeSpecName "public-tls-certs". 
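The "Cleaned up orphaned pod volumes dir" entries come from the kubelet's periodic sweep of per-pod directories whose pods are gone. A rough sketch of that idea, assuming the standard /var/lib/kubelet/pods layout; unlike the real kubelet, this version only reports candidates instead of deleting them.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphans walks podsRoot and reports the volumes dir of every
// per-UID directory whose pod is no longer in the known set.
func cleanupOrphans(podsRoot string, known map[string]bool) error {
	entries, err := os.ReadDir(podsRoot)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || known[e.Name()] {
			continue // active pod, or not a pod dir
		}
		volumes := filepath.Join(podsRoot, e.Name(), "volumes")
		if _, err := os.Stat(volumes); err != nil {
			continue // nothing left to clean
		}
		// Real cleanup must first verify every volume is unmounted;
		// here we only report, mirroring the log line.
		fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n",
			e.Name(), volumes)
	}
	return nil
}

func main() {
	_ = cleanupOrphans("/var/lib/kubelet/pods", map[string]bool{})
}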
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.942769 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" path="/var/lib/kubelet/pods/b8341a6f-4783-4bf6-916e-ac655208ba45/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.943640 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca0c9b41-c081-4a81-90f2-730e16c7d347" path="/var/lib/kubelet/pods/ca0c9b41-c081-4a81-90f2-730e16c7d347/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.944153 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" path="/var/lib/kubelet/pods/dfe2dfd3-8b6d-466a-92f5-68e649d31298/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.945712 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" path="/var/lib/kubelet/pods/ed01e164-93b5-47c7-9ec0-7a00d6942c66/volumes" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967282 5081 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967321 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967337 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967354 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967372 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbxsb\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-kube-api-access-pbxsb\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967385 5081 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967399 5081 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967438 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967454 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:11 crc kubenswrapper[5081]: I1003 15:53:11.967467 5081 
reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254b0c39-e3af-4a48-a954-5ff334d36670-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.041864 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.049786 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data" (OuterVolumeSpecName: "config-data") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.070456 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.070487 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.152791 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.162434 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf" (OuterVolumeSpecName: "server-conf") pod "7060c6c8-bbe8-47ae-8ef2-4358291dbb61" (UID: "7060c6c8-bbe8-47ae-8ef2-4358291dbb61"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.182164 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.182209 5081 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7060c6c8-bbe8-47ae-8ef2-4358291dbb61-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.200349 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.220759 5081 generic.go:334] "Generic (PLEG): container finished" podID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerID="5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e" exitCode=0 Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.220887 5081 util.go:48] "No ready sandbox for pod can be found. 
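Note the ordering above for local-storage11-crc: "operationExecutor.UnmountDevice started" appears only after the pod-level TearDown of the "persistence" volume has finished, i.e. the device-level unmount waits until no pod still references the mount. A toy refcount sketch of that two-phase ordering, with an invented device type:

package main

import "fmt"

// device tracks how many pods still hold a pod-level mount of the volume.
type device struct {
	name string
	refs int
}

// tearDownPodMount releases one pod-level mount; only the last release
// triggers the node-level device unmount, matching the log ordering.
func (d *device) tearDownPodMount() {
	d.refs--
	fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", d.name)
	if d.refs == 0 {
		fmt.Printf("UnmountDevice succeeded for volume %q\n", d.name)
	}
}

func main() {
	d := &device{name: "local-storage11-crc", refs: 1}
	d.tearDownPodMount()
}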
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.223085 5081 generic.go:334] "Generic (PLEG): container finished" podID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerID="8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" exitCode=0
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.226270 5081 generic.go:334] "Generic (PLEG): container finished" podID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerID="b01bf452cb29e0baf0468dc3879379123d94b893f16e315da708a69a521d9ace" exitCode=0
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.227629 5081 generic.go:334] "Generic (PLEG): container finished" podID="254b0c39-e3af-4a48-a954-5ff334d36670" containerID="9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18" exitCode=0
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.227704 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f5c54b599-s8jwr"
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.235894 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.235962 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.240177 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.240656 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.253708 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.253779 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.259676 5081 generic.go:334] "Generic (PLEG): container finished" podID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerID="d458d29d42b270ecda96b4132690bafef24693a1bcafe119e2fb5b9bab9353b5" exitCode=0
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.264987 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.265062 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360138 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bcffbc9c7-qbr72" event={"ID":"f64376f5-c10d-45cb-a9eb-81d9a4cd8b19","Type":"ContainerDied","Data":"97b8550bf479f277c469071e93c069ed04e84c9543820f2a58f6917640c84441"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360198 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerDied","Data":"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360224 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7060c6c8-bbe8-47ae-8ef2-4358291dbb61","Type":"ContainerDied","Data":"69af4ca883d265ec585015516f9d0443b34a57fe43f25c6b46a3ec2ad9d36eea"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360237 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f1151206-1989-4b3a-bc02-176a6f3cf481","Type":"ContainerDied","Data":"8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360253 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerDied","Data":"b01bf452cb29e0baf0468dc3879379123d94b893f16e315da708a69a521d9ace"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360269 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f5c54b599-s8jwr" event={"ID":"254b0c39-e3af-4a48-a954-5ff334d36670","Type":"ContainerDied","Data":"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f5c54b599-s8jwr" event={"ID":"254b0c39-e3af-4a48-a954-5ff334d36670","Type":"ContainerDied","Data":"c65a84dea2d07c0406ddc3330207acc536f4b55adbfdb1e29cee19462a1e83ff"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360293 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerDied","Data":"d458d29d42b270ecda96b4132690bafef24693a1bcafe119e2fb5b9bab9353b5"}
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.360315 5081 scope.go:117] "RemoveContainer" containerID="9764cd3101ebf1f837d65e3b2926078df427b405bf605c5170fe2cd8a77e323b"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.415293 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.444027 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-7bcffbc9c7-qbr72"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.451974 5081 scope.go:117] "RemoveContainer" containerID="424ab139179e1f0e89a94103edf7bef82522d7b08ab2d180de8ddb2ba8927c6e"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.459010 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.474530 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7f5c54b599-s8jwr"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.483935 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.491671 5081 scope.go:117] "RemoveContainer" containerID="5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.494759 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.525462 5081 scope.go:117] "RemoveContainer" containerID="02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.562764 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.566601 5081 scope.go:117] "RemoveContainer" containerID="5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.566998 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e\": container with ID starting with 5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e not found: ID does not exist" containerID="5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.567021 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e"} err="failed to get container status \"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e\": rpc error: code = NotFound desc = could not find container \"5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e\": container with ID starting with 5fabbbbb74ff06e4e1dad8ee8268cacd0abaf25064f01251e6ee1898b2ab583e not found: ID does not exist"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.567042 5081 scope.go:117] "RemoveContainer" containerID="02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.567288 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05\": container with ID starting with 02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05 not found: ID does not exist" containerID="02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.567319 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05"} err="failed to get container status \"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05\": rpc error: code = NotFound desc = could not find container \"02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05\": container with ID starting with 02826c8885a6e6963c96553e2f9836d3c77c18cdf8b0829595c2d0f11dffbb05 not found: ID does not exist"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.567343 5081 scope.go:117] "RemoveContainer" containerID="9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.600781 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.600839 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.600892 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.600930 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.600967 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601010 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601033 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601071 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601106 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6vsv\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601179 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.601210 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data\") pod \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\" (UID: \"5344c615-93ae-4a4a-95b1-3bbe3327f42e\") "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.607380 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.608352 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.609597 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.611450 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.613869 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv" (OuterVolumeSpecName: "kube-api-access-j6vsv") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "kube-api-access-j6vsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.614595 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.620653 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.633738 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info" (OuterVolumeSpecName: "pod-info") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.645998 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data" (OuterVolumeSpecName: "config-data") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.646177 5081 scope.go:117] "RemoveContainer" containerID="9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"
Oct 03 15:53:12 crc kubenswrapper[5081]: E1003 15:53:12.646570 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18\": container with ID starting with 9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18 not found: ID does not exist" containerID="9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.646630 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18"} err="failed to get container status \"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18\": rpc error: code = NotFound desc = could not find container \"9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18\": container with ID starting with 9be626353e85505e2c754363361e13f24356c5581d0d18d5df75690d7bf8ee18 not found: ID does not exist"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.689201 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf" (OuterVolumeSpecName: "server-conf") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704077 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704106 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704118 5081 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5344c615-93ae-4a4a-95b1-3bbe3327f42e-pod-info\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704130 5081 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704141 5081 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5344c615-93ae-4a4a-95b1-3bbe3327f42e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704165 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704176 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704188 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6vsv\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-kube-api-access-j6vsv\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704198 5081 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-server-conf\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.704208 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5344c615-93ae-4a4a-95b1-3bbe3327f42e-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.729918 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.790073 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.793992 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5344c615-93ae-4a4a-95b1-3bbe3327f42e" (UID: "5344c615-93ae-4a4a-95b1-3bbe3327f42e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.801093 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.805769 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5344c615-93ae-4a4a-95b1-3bbe3327f42e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.805843 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906764 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906817 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smv7r\" (UniqueName: \"kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r\") pod \"f1151206-1989-4b3a-bc02-176a6f3cf481\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906851 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906891 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906932 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.906966 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data\") pod \"f1151206-1989-4b3a-bc02-176a6f3cf481\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907038 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7wx4\" (UniqueName: \"kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907071 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle\") pod \"f1151206-1989-4b3a-bc02-176a6f3cf481\" (UID: \"f1151206-1989-4b3a-bc02-176a6f3cf481\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907414 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907482 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907518 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml\") pod \"9c38ca50-e27f-42f4-b828-12ca75618d53\" (UID: \"9c38ca50-e27f-42f4-b828-12ca75618d53\") " Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.907981 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.910411 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.914251 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4" (OuterVolumeSpecName: "kube-api-access-c7wx4") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "kube-api-access-c7wx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.930878 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts" (OuterVolumeSpecName: "scripts") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.939931 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r" (OuterVolumeSpecName: "kube-api-access-smv7r") pod "f1151206-1989-4b3a-bc02-176a6f3cf481" (UID: "f1151206-1989-4b3a-bc02-176a6f3cf481"). InnerVolumeSpecName "kube-api-access-smv7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.966736 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data" (OuterVolumeSpecName: "config-data") pod "f1151206-1989-4b3a-bc02-176a6f3cf481" (UID: "f1151206-1989-4b3a-bc02-176a6f3cf481"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.988677 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1151206-1989-4b3a-bc02-176a6f3cf481" (UID: "f1151206-1989-4b3a-bc02-176a6f3cf481"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:12 crc kubenswrapper[5081]: I1003 15:53:12.989823 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.001749 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009522 5081 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009589 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009602 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smv7r\" (UniqueName: \"kubernetes.io/projected/f1151206-1989-4b3a-bc02-176a6f3cf481-kube-api-access-smv7r\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009615 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009628 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009640 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009651 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7wx4\" (UniqueName: \"kubernetes.io/projected/9c38ca50-e27f-42f4-b828-12ca75618d53-kube-api-access-c7wx4\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009664 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1151206-1989-4b3a-bc02-176a6f3cf481-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" 
Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.009676 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c38ca50-e27f-42f4-b828-12ca75618d53-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.044660 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data" (OuterVolumeSpecName: "config-data") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.050736 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c38ca50-e27f-42f4-b828-12ca75618d53" (UID: "9c38ca50-e27f-42f4-b828-12ca75618d53"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.110766 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.111092 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ca50-e27f-42f4-b828-12ca75618d53-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.156247 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.156714 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.172855 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_477e7150-1a22-403b-950e-6d1547d2859c/ovn-northd/0.log" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.172959 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.212781 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213004 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213088 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213171 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213212 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213258 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d5t7\" (UniqueName: \"kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213315 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs\") pod \"477e7150-1a22-403b-950e-6d1547d2859c\" (UID: \"477e7150-1a22-403b-950e-6d1547d2859c\") " Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.213600 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config" (OuterVolumeSpecName: "config") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.214161 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.214454 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts" (OuterVolumeSpecName: "scripts") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.214831 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.218793 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7" (OuterVolumeSpecName: "kube-api-access-4d5t7") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "kube-api-access-4d5t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.243924 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.272409 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.272452 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f1151206-1989-4b3a-bc02-176a6f3cf481","Type":"ContainerDied","Data":"228fa2f1c9585705ded2b3e1324e46b5764f869d03351d493b4374d44258995a"} Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.272499 5081 scope.go:117] "RemoveContainer" containerID="8319471a70bf1bf36c525a43e07ed4bd7af9f3625c1d2aed386f1d80a9a233c3" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.276674 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5344c615-93ae-4a4a-95b1-3bbe3327f42e","Type":"ContainerDied","Data":"e135e27dd60f8ae622221c072f1620cfa082534752056de1943e3a06181dc780"} Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.276791 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.281272 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.280961 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9c38ca50-e27f-42f4-b828-12ca75618d53","Type":"ContainerDied","Data":"5cea3734be751a28ab4a74fa7c96c0f8ddd8c8f6782a315b997aea60d80e315f"} Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.283504 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_477e7150-1a22-403b-950e-6d1547d2859c/ovn-northd/0.log" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.283575 5081 generic.go:334] "Generic (PLEG): container finished" podID="477e7150-1a22-403b-950e-6d1547d2859c" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" exitCode=139 Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.283712 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.283853 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerDied","Data":"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04"} Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.283902 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"477e7150-1a22-403b-950e-6d1547d2859c","Type":"ContainerDied","Data":"32e7adf5812d09d2318aeea63c100ec4689502bd2bd627508ae592d923d7f65a"} Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.293523 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.316474 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.316540 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/477e7150-1a22-403b-950e-6d1547d2859c-ovn-rundir\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.316552 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.316734 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/477e7150-1a22-403b-950e-6d1547d2859c-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.316746 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d5t7\" (UniqueName: \"kubernetes.io/projected/477e7150-1a22-403b-950e-6d1547d2859c-kube-api-access-4d5t7\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.324876 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.325154 5081 scope.go:117] "RemoveContainer" containerID="b01bf452cb29e0baf0468dc3879379123d94b893f16e315da708a69a521d9ace" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.334537 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "477e7150-1a22-403b-950e-6d1547d2859c" (UID: "477e7150-1a22-403b-950e-6d1547d2859c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.336890 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.362276 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.371630 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.379911 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.386486 5081 scope.go:117] "RemoveContainer" containerID="a5feaf1cce73df27885a56ebceeea244ff4cd05c4d9e4c7c1fd17a91558166ca" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.387522 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.418214 5081 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/477e7150-1a22-403b-950e-6d1547d2859c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.456460 5081 scope.go:117] "RemoveContainer" containerID="c0cc61bb578e6b82b21ec38b2933e461e6db44834efbf165d4d23566ac8055d0" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.481368 5081 scope.go:117] "RemoveContainer" containerID="15cacaf50eb36833c76259d52ef81d26141a2d02ee727e2efd6a2a4ccbfc0c93" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.509461 5081 scope.go:117] "RemoveContainer" containerID="d458d29d42b270ecda96b4132690bafef24693a1bcafe119e2fb5b9bab9353b5" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.542414 5081 scope.go:117] "RemoveContainer" containerID="e89a72236f59fd3fd90bec6d4e3cb013c31ae0d3b3cd93b35b04de7bbc1b544a" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.575002 5081 scope.go:117] "RemoveContainer" containerID="49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.639224 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.640061 5081 scope.go:117] "RemoveContainer" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.652065 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.674964 5081 scope.go:117] "RemoveContainer" containerID="49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71" Oct 03 15:53:13 crc kubenswrapper[5081]: E1003 15:53:13.675631 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71\": container with ID starting with 49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71 not found: ID does not exist" containerID="49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.675680 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71"} err="failed to get 
container status \"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71\": rpc error: code = NotFound desc = could not find container \"49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71\": container with ID starting with 49b16ee5de03f1293260595fb4463cceeb42e9980d90398bfabe16c799233d71 not found: ID does not exist" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.675715 5081 scope.go:117] "RemoveContainer" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" Oct 03 15:53:13 crc kubenswrapper[5081]: E1003 15:53:13.676348 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04\": container with ID starting with 8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04 not found: ID does not exist" containerID="8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.676381 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04"} err="failed to get container status \"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04\": rpc error: code = NotFound desc = could not find container \"8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04\": container with ID starting with 8537d4ee6f552a7d207d0795b2ec0bdfae8098ec91cc255ea7edcf04175d1c04 not found: ID does not exist" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.838898 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="254b0c39-e3af-4a48-a954-5ff334d36670" path="/var/lib/kubelet/pods/254b0c39-e3af-4a48-a954-5ff334d36670/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.839717 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477e7150-1a22-403b-950e-6d1547d2859c" path="/var/lib/kubelet/pods/477e7150-1a22-403b-950e-6d1547d2859c/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.841246 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" path="/var/lib/kubelet/pods/5344c615-93ae-4a4a-95b1-3bbe3327f42e/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.843099 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" path="/var/lib/kubelet/pods/7060c6c8-bbe8-47ae-8ef2-4358291dbb61/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.844146 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" path="/var/lib/kubelet/pods/9c38ca50-e27f-42f4-b828-12ca75618d53/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.846521 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" path="/var/lib/kubelet/pods/f1151206-1989-4b3a-bc02-176a6f3cf481/volumes" Oct 03 15:53:13 crc kubenswrapper[5081]: I1003 15:53:13.847672 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" path="/var/lib/kubelet/pods/f64376f5-c10d-45cb-a9eb-81d9a4cd8b19/volumes" Oct 03 15:53:16 crc kubenswrapper[5081]: I1003 15:53:16.579827 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" 
containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.234902 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.236385 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.236818 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.236864 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.246414 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.249548 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.253027 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:17 crc kubenswrapper[5081]: E1003 15:53:17.253113 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.343154 5081 generic.go:334] "Generic (PLEG): container finished" podID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerID="34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" exitCode=0 Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.343206 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerDied","Data":"34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a"} Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.592633 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605031 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605101 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605156 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdzkk\" (UniqueName: \"kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605198 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605246 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605300 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605332 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605387 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.605471 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets\") pod \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\" (UID: \"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5\") " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.606131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.606145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.606502 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.607050 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.621330 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets" (OuterVolumeSpecName: "secrets") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.627744 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk" (OuterVolumeSpecName: "kube-api-access-vdzkk") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "kube-api-access-vdzkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.629046 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.693500 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708028 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708070 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708086 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdzkk\" (UniqueName: \"kubernetes.io/projected/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kube-api-access-vdzkk\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708100 5081 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708115 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708127 5081 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708138 5081 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.708149 5081 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.713850 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" (UID: "f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.731043 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.808900 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:18 crc kubenswrapper[5081]: I1003 15:53:18.810093 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.374961 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5","Type":"ContainerDied","Data":"c2b2dbef3cf95a2c7bd9fa9ae0eba334e37bf7b5cf4fd95c2daad84683fcef05"} Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.375029 5081 scope.go:117] "RemoveContainer" containerID="34dd5e4aec5430d8295ea66b04403b2448f41a60bd6c7616ea2d32006961384a" Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.375256 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.409772 5081 scope.go:117] "RemoveContainer" containerID="5334f37d8987ae4171ae3899b8da5f80778f1221e5d6c5d21da758b59468cc2f" Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.417651 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.424907 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 15:53:19 crc kubenswrapper[5081]: I1003 15:53:19.839680 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" path="/var/lib/kubelet/pods/f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5/volumes" Oct 03 15:53:21 crc kubenswrapper[5081]: I1003 15:53:21.564739 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:21 crc kubenswrapper[5081]: I1003 15:53:21.585851 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.235362 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.236023 5081 log.go:32] 
"ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.236449 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.236425 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.236591 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.240056 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.243823 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:22 crc kubenswrapper[5081]: E1003 15:53:22.243879 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.440922 5081 generic.go:334] "Generic (PLEG): container finished" podID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerID="c7fd1d71f2d9ba417f029d604edbb305edd80c93b20a0706ffa0b1ed0e2b1efc" exitCode=0 Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.441059 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerDied","Data":"c7fd1d71f2d9ba417f029d604edbb305edd80c93b20a0706ffa0b1ed0e2b1efc"} Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.754429 5081 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.925751 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926224 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926289 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926319 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926357 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926421 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.926543 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv59w\" (UniqueName: \"kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w\") pod \"264449b6-a64d-4d0b-a465-616fa49b3eca\" (UID: \"264449b6-a64d-4d0b-a465-616fa49b3eca\") " Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.936210 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w" (OuterVolumeSpecName: "kube-api-access-dv59w") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "kube-api-access-dv59w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.940006 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.973519 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.973615 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config" (OuterVolumeSpecName: "config") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.974094 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.975155 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:25 crc kubenswrapper[5081]: I1003 15:53:25.994760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "264449b6-a64d-4d0b-a465-616fa49b3eca" (UID: "264449b6-a64d-4d0b-a465-616fa49b3eca"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028411 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv59w\" (UniqueName: \"kubernetes.io/projected/264449b6-a64d-4d0b-a465-616fa49b3eca-kube-api-access-dv59w\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028453 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028466 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028478 5081 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028490 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028504 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.028515 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/264449b6-a64d-4d0b-a465-616fa49b3eca-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.451166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ff85fbc4f-f9zcx" event={"ID":"264449b6-a64d-4d0b-a465-616fa49b3eca","Type":"ContainerDied","Data":"89dc724fc8899b69f08b99987a7aed7d102c8072ed2e84563fffdc198c3b01f2"} Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.451217 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ff85fbc4f-f9zcx" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.451233 5081 scope.go:117] "RemoveContainer" containerID="c75af55c799a16c6d3fb1fcae9ca1ebc16c7f7ce2f3b26cb7521040390a192ad" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.471748 5081 scope.go:117] "RemoveContainer" containerID="c7fd1d71f2d9ba417f029d604edbb305edd80c93b20a0706ffa0b1ed0e2b1efc" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.486667 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"] Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.492978 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5ff85fbc4f-f9zcx"] Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.569776 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:26 crc kubenswrapper[5081]: I1003 15:53:26.590883 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.234214 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.237093 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.237170 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.237588 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.237680 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking 
if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.239660 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.241230 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:27 crc kubenswrapper[5081]: E1003 15:53:27.241286 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:27 crc kubenswrapper[5081]: I1003 15:53:27.840151 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" path="/var/lib/kubelet/pods/264449b6-a64d-4d0b-a465-616fa49b3eca/volumes" Oct 03 15:53:30 crc kubenswrapper[5081]: I1003 15:53:30.647731 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:53:30 crc kubenswrapper[5081]: I1003 15:53:30.648072 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:53:31 crc kubenswrapper[5081]: I1003 15:53:31.573738 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:31 crc kubenswrapper[5081]: I1003 15:53:31.594750 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.234466 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process 
not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.236414 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.236879 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.236944 5081 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.237019 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.238618 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.239823 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 03 15:53:32 crc kubenswrapper[5081]: E1003 15:53:32.239861 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-52hh5" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:32 crc kubenswrapper[5081]: I1003 15:53:32.508433 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-52hh5_f41a1c07-9bcc-4237-869e-dff5d9c480f8/ovs-vswitchd/0.log" Oct 03 15:53:32 crc kubenswrapper[5081]: I1003 15:53:32.510410 5081 generic.go:334] "Generic (PLEG): container finished" podID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" 
containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" exitCode=137 Oct 03 15:53:32 crc kubenswrapper[5081]: I1003 15:53:32.510450 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerDied","Data":"a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252"} Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.454426 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-52hh5_f41a1c07-9bcc-4237-869e-dff5d9c480f8/ovs-vswitchd/0.log" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.455400 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.548273 5081 generic.go:334] "Generic (PLEG): container finished" podID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerID="00b89755a18526b56352f35b1330f853e78ba6ee6b50eb49214837e6f9797ab9" exitCode=137 Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.548354 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"00b89755a18526b56352f35b1330f853e78ba6ee6b50eb49214837e6f9797ab9"} Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.550147 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-52hh5_f41a1c07-9bcc-4237-869e-dff5d9c480f8/ovs-vswitchd/0.log" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.550768 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-52hh5" event={"ID":"f41a1c07-9bcc-4237-869e-dff5d9c480f8","Type":"ContainerDied","Data":"614d239c57dcf51221b6fce0fa442cf71d5f3dc97dea8711c6acbf0f7c511e2d"} Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.550802 5081 scope.go:117] "RemoveContainer" containerID="a6ea929d28f3f2b4c9386395d125308ca2122cde748859248b72c0ac6579f252" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.550851 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-52hh5" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572128 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572233 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572267 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bbvl\" (UniqueName: \"kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572290 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572327 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572416 5081 scope.go:117] "RemoveContainer" containerID="d53a12325ce2f191e204bc8ddf0d7f3a0c087420bff950f21271de52b0eb5903" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572518 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572592 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run" (OuterVolumeSpecName: "var-run") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572629 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib" (OuterVolumeSpecName: "var-lib") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572664 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts\") pod \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\" (UID: \"f41a1c07-9bcc-4237-869e-dff5d9c480f8\") " Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.572726 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log" (OuterVolumeSpecName: "var-log") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.573046 5081 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-etc-ovs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.573127 5081 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.573138 5081 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-lib\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.573146 5081 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f41a1c07-9bcc-4237-869e-dff5d9c480f8-var-log\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.573838 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts" (OuterVolumeSpecName: "scripts") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.578442 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl" (OuterVolumeSpecName: "kube-api-access-7bbvl") pod "f41a1c07-9bcc-4237-869e-dff5d9c480f8" (UID: "f41a1c07-9bcc-4237-869e-dff5d9c480f8"). InnerVolumeSpecName "kube-api-access-7bbvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.643616 5081 scope.go:117] "RemoveContainer" containerID="f53a1e2f0c35790a4b2b82250da1a2f59c50900b47cf68bf115effe5e730954c" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.674994 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f41a1c07-9bcc-4237-869e-dff5d9c480f8-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.675042 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bbvl\" (UniqueName: \"kubernetes.io/projected/f41a1c07-9bcc-4237-869e-dff5d9c480f8-kube-api-access-7bbvl\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.907611 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-52hh5"] Oct 03 15:53:33 crc kubenswrapper[5081]: I1003 15:53:33.922585 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-52hh5"] Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.048053 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8cws\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws\") pod \"4791d8d2-2a2a-4595-8678-10ec383956f0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181289 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock\") pod \"4791d8d2-2a2a-4595-8678-10ec383956f0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181322 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache\") pod \"4791d8d2-2a2a-4595-8678-10ec383956f0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181363 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") pod \"4791d8d2-2a2a-4595-8678-10ec383956f0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181423 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"4791d8d2-2a2a-4595-8678-10ec383956f0\" (UID: \"4791d8d2-2a2a-4595-8678-10ec383956f0\") " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.181989 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache" (OuterVolumeSpecName: "cache") pod "4791d8d2-2a2a-4595-8678-10ec383956f0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.182446 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock" (OuterVolumeSpecName: "lock") pod "4791d8d2-2a2a-4595-8678-10ec383956f0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.186706 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "4791d8d2-2a2a-4595-8678-10ec383956f0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.186765 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws" (OuterVolumeSpecName: "kube-api-access-s8cws") pod "4791d8d2-2a2a-4595-8678-10ec383956f0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0"). InnerVolumeSpecName "kube-api-access-s8cws". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.186845 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4791d8d2-2a2a-4595-8678-10ec383956f0" (UID: "4791d8d2-2a2a-4595-8678-10ec383956f0"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.283103 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8cws\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-kube-api-access-s8cws\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.283141 5081 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-lock\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.283150 5081 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4791d8d2-2a2a-4595-8678-10ec383956f0-cache\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.283158 5081 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4791d8d2-2a2a-4595-8678-10ec383956f0-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.283194 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.298531 5081 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.384348 5081 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.565544 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"4791d8d2-2a2a-4595-8678-10ec383956f0","Type":"ContainerDied","Data":"ab677a205221fcaeb13f4f7b06fda4f0ca2c646591db3e721c18127142d790a9"} Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.565661 5081 scope.go:117] "RemoveContainer" containerID="00b89755a18526b56352f35b1330f853e78ba6ee6b50eb49214837e6f9797ab9" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.565680 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.587972 5081 scope.go:117] "RemoveContainer" containerID="e80c760b9b37c5a71b037090135c5ba4f32f98d8709aa318d7bf69734c058ea2" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.602574 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.609781 5081 scope.go:117] "RemoveContainer" containerID="a7b2d7ca1d510ef79cf6048ee9579f2ac7ff3e40ff9234a031ecf02dfba25777" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.612614 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.626361 5081 scope.go:117] "RemoveContainer" containerID="0dfae43d5dfa9c09237aa7cb9e6fcba01b60a0f0e13fdd86961e6469f5f09d3c" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.671727 5081 scope.go:117] "RemoveContainer" containerID="fc2a1a4f0df9739d588ce081aaaa43ab9cfe57521cacaf41f5e2e169875cad7b" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.690982 5081 scope.go:117] "RemoveContainer" containerID="962a1e3fd606faa70fe55c161c25398c016ac0969ef92d4e88b58a60f3ef02eb" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.724241 5081 scope.go:117] "RemoveContainer" containerID="2d265cc1788a8ffd41d868c45e20c3c29c12f51c4e066b3d0b0c81546645bcfa" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.744495 5081 scope.go:117] "RemoveContainer" containerID="ae79f147bd8fd93a896550501e3a9434ca704c15d3e99a1d98595472b5b0f638" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.765818 5081 scope.go:117] "RemoveContainer" containerID="d0329f49fa7dc846b20dc08a8389809ac26059dd282cb5b7a946f6475f240c48" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.805723 5081 scope.go:117] "RemoveContainer" containerID="45463607f3525f8e75aa01b96f684ea8a4d207f0bdd044315c3bcfc0933d9b65" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.828858 5081 scope.go:117] "RemoveContainer" containerID="676eedca602ba56315b71044aa07d745f875330d2c6a9a252a84c0c20469a5b3" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.848074 5081 scope.go:117] "RemoveContainer" containerID="9fb9ff61254c258e053e39687cfe871e46e9e37bc3923e11a92f9ba4e6d36e54" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.865623 5081 scope.go:117] "RemoveContainer" containerID="6183bf8da2a80f5b9e9698fac4e3f60d821b5c52084e202cff3ec20d564ffd21" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.902901 5081 scope.go:117] "RemoveContainer" containerID="21dea586b40c33dcc79f88530d74f5e7fcd590c00c4174c95c9b987e02a408cb" Oct 03 15:53:34 crc kubenswrapper[5081]: I1003 15:53:34.919702 5081 scope.go:117] "RemoveContainer" containerID="2dc48262e0ade0cd5b46c732df33fce4d98185362e199780f79f0145f57aa828" Oct 03 15:53:35 crc kubenswrapper[5081]: I1003 15:53:35.836816 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" path="/var/lib/kubelet/pods/4791d8d2-2a2a-4595-8678-10ec383956f0/volumes" Oct 03 15:53:35 crc kubenswrapper[5081]: I1003 15:53:35.839197 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" path="/var/lib/kubelet/pods/f41a1c07-9bcc-4237-869e-dff5d9c480f8/volumes" Oct 03 15:53:36 crc kubenswrapper[5081]: I1003 15:53:36.578754 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" 
podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:36 crc kubenswrapper[5081]: I1003 15:53:36.599916 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 15:53:37 crc kubenswrapper[5081]: I1003 15:53:37.591855 5081 generic.go:334] "Generic (PLEG): container finished" podID="684a3452-107a-4e1f-93a5-c063711e6377" containerID="05bb924771a100f2ff1e235a945a86029a7579a8826ee5c4ed8c670df36b3446" exitCode=137 Oct 03 15:53:37 crc kubenswrapper[5081]: I1003 15:53:37.591912 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerDied","Data":"05bb924771a100f2ff1e235a945a86029a7579a8826ee5c4ed8c670df36b3446"} Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.036177 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.158123 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data\") pod \"684a3452-107a-4e1f-93a5-c063711e6377\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.158173 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom\") pod \"684a3452-107a-4e1f-93a5-c063711e6377\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.158323 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle\") pod \"684a3452-107a-4e1f-93a5-c063711e6377\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.158367 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs\") pod \"684a3452-107a-4e1f-93a5-c063711e6377\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.158397 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5rmz\" (UniqueName: \"kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz\") pod \"684a3452-107a-4e1f-93a5-c063711e6377\" (UID: \"684a3452-107a-4e1f-93a5-c063711e6377\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.160196 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs" (OuterVolumeSpecName: "logs") pod "684a3452-107a-4e1f-93a5-c063711e6377" (UID: "684a3452-107a-4e1f-93a5-c063711e6377"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.176525 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "684a3452-107a-4e1f-93a5-c063711e6377" (UID: "684a3452-107a-4e1f-93a5-c063711e6377"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.180590 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "684a3452-107a-4e1f-93a5-c063711e6377" (UID: "684a3452-107a-4e1f-93a5-c063711e6377"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.182328 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz" (OuterVolumeSpecName: "kube-api-access-b5rmz") pod "684a3452-107a-4e1f-93a5-c063711e6377" (UID: "684a3452-107a-4e1f-93a5-c063711e6377"). InnerVolumeSpecName "kube-api-access-b5rmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.203509 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data" (OuterVolumeSpecName: "config-data") pod "684a3452-107a-4e1f-93a5-c063711e6377" (UID: "684a3452-107a-4e1f-93a5-c063711e6377"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.235653 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderedee-account-delete-tcdxx" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.259624 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.259656 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.259667 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/684a3452-107a-4e1f-93a5-c063711e6377-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.259677 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/684a3452-107a-4e1f-93a5-c063711e6377-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.259687 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5rmz\" (UniqueName: \"kubernetes.io/projected/684a3452-107a-4e1f-93a5-c063711e6377-kube-api-access-b5rmz\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.360416 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcf48\" (UniqueName: \"kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48\") pod \"e86e6033-7336-4deb-bf90-8c4941d56542\" (UID: \"e86e6033-7336-4deb-bf90-8c4941d56542\") " Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.363334 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48" (OuterVolumeSpecName: "kube-api-access-kcf48") pod "e86e6033-7336-4deb-bf90-8c4941d56542" (UID: "e86e6033-7336-4deb-bf90-8c4941d56542"). InnerVolumeSpecName "kube-api-access-kcf48". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.462003 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcf48\" (UniqueName: \"kubernetes.io/projected/e86e6033-7336-4deb-bf90-8c4941d56542-kube-api-access-kcf48\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.602707 5081 generic.go:334] "Generic (PLEG): container finished" podID="e86e6033-7336-4deb-bf90-8c4941d56542" containerID="bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf" exitCode=137 Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.602750 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedee-account-delete-tcdxx" event={"ID":"e86e6033-7336-4deb-bf90-8c4941d56542","Type":"ContainerDied","Data":"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf"} Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.603137 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedee-account-delete-tcdxx" event={"ID":"e86e6033-7336-4deb-bf90-8c4941d56542","Type":"ContainerDied","Data":"0b5d509d21e76fb092e8a9f720e51c9b9f3b048c51f5b30bd82dc38072d9922a"} Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.603155 5081 scope.go:117] "RemoveContainer" containerID="bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.602795 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedee-account-delete-tcdxx" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.605315 5081 generic.go:334] "Generic (PLEG): container finished" podID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerID="9d8741c1eed1c55db0932c406767602466524f85522227eda0fdd1d95c35f2e1" exitCode=137 Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.605372 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerDied","Data":"9d8741c1eed1c55db0932c406767602466524f85522227eda0fdd1d95c35f2e1"} Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.607336 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" event={"ID":"684a3452-107a-4e1f-93a5-c063711e6377","Type":"ContainerDied","Data":"304034c7570a894a485d85232f959ab51f7a6cbbd0090b93e1d195aa002ec3c7"} Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.607412 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-b57b4ccd-848cl" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.636775 5081 scope.go:117] "RemoveContainer" containerID="bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf" Oct 03 15:53:38 crc kubenswrapper[5081]: E1003 15:53:38.637978 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf\": container with ID starting with bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf not found: ID does not exist" containerID="bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.638034 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf"} err="failed to get container status \"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf\": rpc error: code = NotFound desc = could not find container \"bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf\": container with ID starting with bc84d28450291fd7a4cdef6909b5b66d5dfcade4c276d72432a2cfc95a1beecf not found: ID does not exist" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.638066 5081 scope.go:117] "RemoveContainer" containerID="05bb924771a100f2ff1e235a945a86029a7579a8826ee5c4ed8c670df36b3446" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.639622 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"] Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.652681 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-b57b4ccd-848cl"] Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.659159 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"] Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.664517 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinderedee-account-delete-tcdxx"] Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.669441 5081 scope.go:117] "RemoveContainer" containerID="702ba6b98e592df611cab939fccc402c5b2b70dd82e3db9c234d04ce1db93139" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.892203 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": EOF" Oct 03 15:53:38 crc kubenswrapper[5081]: I1003 15:53:38.905383 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df6654c96-f7vp5" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.207:9311/healthcheck\": EOF" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.152156 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.273626 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom\") pod \"fcbf652f-a193-47f7-872b-e9864a40cd0a\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.273718 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle\") pod \"fcbf652f-a193-47f7-872b-e9864a40cd0a\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.273828 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data\") pod \"fcbf652f-a193-47f7-872b-e9864a40cd0a\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.273889 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngn54\" (UniqueName: \"kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54\") pod \"fcbf652f-a193-47f7-872b-e9864a40cd0a\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.274034 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs\") pod \"fcbf652f-a193-47f7-872b-e9864a40cd0a\" (UID: \"fcbf652f-a193-47f7-872b-e9864a40cd0a\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.274731 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs" (OuterVolumeSpecName: "logs") pod "fcbf652f-a193-47f7-872b-e9864a40cd0a" (UID: "fcbf652f-a193-47f7-872b-e9864a40cd0a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.277467 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fcbf652f-a193-47f7-872b-e9864a40cd0a" (UID: "fcbf652f-a193-47f7-872b-e9864a40cd0a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.278641 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54" (OuterVolumeSpecName: "kube-api-access-ngn54") pod "fcbf652f-a193-47f7-872b-e9864a40cd0a" (UID: "fcbf652f-a193-47f7-872b-e9864a40cd0a"). InnerVolumeSpecName "kube-api-access-ngn54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.297999 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcbf652f-a193-47f7-872b-e9864a40cd0a" (UID: "fcbf652f-a193-47f7-872b-e9864a40cd0a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.315103 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data" (OuterVolumeSpecName: "config-data") pod "fcbf652f-a193-47f7-872b-e9864a40cd0a" (UID: "fcbf652f-a193-47f7-872b-e9864a40cd0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.375174 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.375217 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngn54\" (UniqueName: \"kubernetes.io/projected/fcbf652f-a193-47f7-872b-e9864a40cd0a-kube-api-access-ngn54\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.375231 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcbf652f-a193-47f7-872b-e9864a40cd0a-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.375240 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.375255 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcbf652f-a193-47f7-872b-e9864a40cd0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.416990 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.577450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.577932 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.578099 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98td7\" (UniqueName: \"kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.578283 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.578420 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.578583 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.578711 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs\") pod \"bd5af114-d170-46ce-8a46-ec0b65ddb545\" (UID: \"bd5af114-d170-46ce-8a46-ec0b65ddb545\") " Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.579622 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs" (OuterVolumeSpecName: "logs") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.580508 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd5af114-d170-46ce-8a46-ec0b65ddb545-logs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.581488 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7" (OuterVolumeSpecName: "kube-api-access-98td7") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "kube-api-access-98td7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.582112 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.599493 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.622101 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.629958 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data" (OuterVolumeSpecName: "config-data") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.630973 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-577c5877f7-gslrj" event={"ID":"fcbf652f-a193-47f7-872b-e9864a40cd0a","Type":"ContainerDied","Data":"ecd9d45527a4d18d2658290373c32d1b031b1ef50cf1527862fd18fe7523928f"} Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.631019 5081 scope.go:117] "RemoveContainer" containerID="9d8741c1eed1c55db0932c406767602466524f85522227eda0fdd1d95c35f2e1" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.630990 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-577c5877f7-gslrj" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.632998 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bd5af114-d170-46ce-8a46-ec0b65ddb545" (UID: "bd5af114-d170-46ce-8a46-ec0b65ddb545"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.634072 5081 generic.go:334] "Generic (PLEG): container finished" podID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerID="1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730" exitCode=137 Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.634120 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5df6654c96-f7vp5" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.634172 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerDied","Data":"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730"} Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.634204 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df6654c96-f7vp5" event={"ID":"bd5af114-d170-46ce-8a46-ec0b65ddb545","Type":"ContainerDied","Data":"44844f51fb0baf1185faf64810b79e0c150c92233ef89a368af98242a0874ae3"} Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.652402 5081 scope.go:117] "RemoveContainer" containerID="65b717a24fa17c44796587db00e37ce3290f0faba7d3338dd5922b855e7ae029" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.671060 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"] Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681856 5081 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681892 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681901 5081 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681912 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681920 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5af114-d170-46ce-8a46-ec0b65ddb545-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.681929 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98td7\" (UniqueName: \"kubernetes.io/projected/bd5af114-d170-46ce-8a46-ec0b65ddb545-kube-api-access-98td7\") on node 
\"crc\" DevicePath \"\"" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.682702 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-577c5877f7-gslrj"] Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.688181 5081 scope.go:117] "RemoveContainer" containerID="1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.688520 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"] Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.695296 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5df6654c96-f7vp5"] Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.712240 5081 scope.go:117] "RemoveContainer" containerID="dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.728533 5081 scope.go:117] "RemoveContainer" containerID="1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730" Oct 03 15:53:39 crc kubenswrapper[5081]: E1003 15:53:39.729022 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730\": container with ID starting with 1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730 not found: ID does not exist" containerID="1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.729078 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730"} err="failed to get container status \"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730\": rpc error: code = NotFound desc = could not find container \"1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730\": container with ID starting with 1ea045a0c6bf141c1ed6a1a93a5f5b394358998fcf50e0c7ae929f3a059d0730 not found: ID does not exist" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.729140 5081 scope.go:117] "RemoveContainer" containerID="dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335" Oct 03 15:53:39 crc kubenswrapper[5081]: E1003 15:53:39.729700 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335\": container with ID starting with dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335 not found: ID does not exist" containerID="dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.729739 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335"} err="failed to get container status \"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335\": rpc error: code = NotFound desc = could not find container \"dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335\": container with ID starting with dda53aeafb3576bc30f4972fdb425fcb4d1a136df337f755a978eaf53edd8335 not found: ID does not exist" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.838934 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="684a3452-107a-4e1f-93a5-c063711e6377" 
path="/var/lib/kubelet/pods/684a3452-107a-4e1f-93a5-c063711e6377/volumes" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.839846 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" path="/var/lib/kubelet/pods/bd5af114-d170-46ce-8a46-ec0b65ddb545/volumes" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.840504 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e86e6033-7336-4deb-bf90-8c4941d56542" path="/var/lib/kubelet/pods/e86e6033-7336-4deb-bf90-8c4941d56542/volumes" Oct 03 15:53:39 crc kubenswrapper[5081]: I1003 15:53:39.841635 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" path="/var/lib/kubelet/pods/fcbf652f-a193-47f7-872b-e9864a40cd0a/volumes" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.544526 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545491 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545505 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545523 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-central-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545528 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-central-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545541 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca0c9b41-c081-4a81-90f2-730e16c7d347" containerName="memcached" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545547 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca0c9b41-c081-4a81-90f2-730e16c7d347" containerName="memcached" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545572 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545580 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545591 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="swift-recon-cron" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545599 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="swift-recon-cron" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545611 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="proxy-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545618 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="proxy-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545626 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" 
containerName="sg-core" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545633 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="sg-core" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545647 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545654 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545665 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545672 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545683 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7abf152b-a4ec-4114-bb59-491582952b05" containerName="nova-scheduler-scheduler" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545690 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7abf152b-a4ec-4114-bb59-491582952b05" containerName="nova-scheduler-scheduler" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545703 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545710 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545722 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545729 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545740 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545747 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545755 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerName="kube-state-metrics" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545762 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerName="kube-state-metrics" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545771 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b5887d-7cec-4aef-8d5c-041346dbc89f" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545778 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b5887d-7cec-4aef-8d5c-041346dbc89f" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545787 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545794 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545803 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545810 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545819 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545827 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545835 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-expirer" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545843 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-expirer" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545853 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545860 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545873 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545880 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545887 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545895 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545908 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="mysql-bootstrap" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545916 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="mysql-bootstrap" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545923 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="setup-container" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545930 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="setup-container" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545940 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server-init" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545947 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server-init" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545958 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545965 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-server" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545975 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerName="nova-cell0-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.545984 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerName="nova-cell0-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.545994 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546001 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546009 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546016 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546031 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546039 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546047 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-notification-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546055 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-notification-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546067 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="155bac7b-055b-4bca-a155-f5ab13dacf80" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546074 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="155bac7b-055b-4bca-a155-f5ab13dacf80" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546083 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86e6033-7336-4deb-bf90-8c4941d56542" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546092 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86e6033-7336-4deb-bf90-8c4941d56542" 
containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546107 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546114 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546127 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546134 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546141 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="rsync" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546147 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="rsync" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546159 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546166 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546173 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546179 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546187 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546194 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546205 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546212 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546222 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546229 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546242 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546249 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="684a3452-107a-4e1f-93a5-c063711e6377" 
containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546262 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546270 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546283 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546292 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546307 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546314 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546327 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546335 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546348 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-reaper" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546356 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-reaper" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546369 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4198a604-1b99-4822-9377-afaaef616d15" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546378 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4198a604-1b99-4822-9377-afaaef616d15" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546387 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546394 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546407 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546414 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546427 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546434 5081 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546446 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546453 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546467 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="openstack-network-exporter" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546475 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="openstack-network-exporter" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546483 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="setup-container" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546491 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="setup-container" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546502 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546511 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546520 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546527 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546537 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546544 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-api" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546573 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546582 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-server" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546591 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546598 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546612 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546619 5081 
state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546627 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546634 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546643 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546650 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546665 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546673 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546685 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a650c48d-76ae-45a3-b79c-e6e014009769" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546693 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a650c48d-76ae-45a3-b79c-e6e014009769" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546702 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546708 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546716 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546723 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546733 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546741 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546753 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546759 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-server" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546769 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" 
containerName="mysql-bootstrap" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546776 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerName="mysql-bootstrap" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546784 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerName="nova-cell1-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546791 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerName="nova-cell1-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: E1003 15:53:57.546807 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254b0c39-e3af-4a48-a954-5ff334d36670" containerName="keystone-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.546815 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="254b0c39-e3af-4a48-a954-5ff334d36670" containerName="keystone-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547018 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547043 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovsdb-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547060 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547067 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547076 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547089 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547101 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547112 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547123 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1151206-1989-4b3a-bc02-176a6f3cf481" containerName="nova-cell1-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547147 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="rsync" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547157 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cb0be2-f988-48bc-afd9-bb4bd348de1f" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547165 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-central-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 
15:53:57.547176 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="686d3fe6-8cc9-4013-a5f3-55fe41ac840e" containerName="ovn-controller" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547190 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a4c256-cdd6-4275-a2e6-6f7dd0f2a4d1" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547202 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-server" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547214 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="ovn-northd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547227 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7abf152b-a4ec-4114-bb59-491582952b05" containerName="nova-scheduler-scheduler" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547236 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-replicator" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547246 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547254 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547265 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a650c48d-76ae-45a3-b79c-e6e014009769" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547276 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b12ba3f3-51d4-4c3d-9677-d0a632be0974" containerName="kube-state-metrics" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547287 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="ceilometer-notification-agent" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547297 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-metadata" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547306 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e86e6033-7336-4deb-bf90-8c4941d56542" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547319 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="684a3452-107a-4e1f-93a5-c063711e6377" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547332 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547344 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547351 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="container-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547362 5081 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f41a1c07-9bcc-4237-869e-dff5d9c480f8" containerName="ovs-vswitchd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547373 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="proxy-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547385 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="477e7150-1a22-403b-950e-6d1547d2859c" containerName="openstack-network-exporter" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547396 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ca50-e27f-42f4-b828-12ca75618d53" containerName="sg-core" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547408 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b5887d-7cec-4aef-8d5c-041346dbc89f" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547418 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca0c9b41-c081-4a81-90f2-730e16c7d347" containerName="memcached" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547430 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547439 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-httpd" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547448 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe2dfd3-8b6d-466a-92f5-68e649d31298" containerName="nova-cell0-conductor-conductor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547461 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547468 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5aeac4-dc04-4a3e-93cf-16b00842df35" containerName="nova-metadata-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547477 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="swift-recon-cron" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547487 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-expirer" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547500 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4198a604-1b99-4822-9377-afaaef616d15" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547507 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7060c6c8-bbe8-47ae-8ef2-4358291dbb61" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547514 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="35ad4758-5e5f-4ba3-84be-a4ae754e9048" containerName="placement-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547522 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="155bac7b-055b-4bca-a155-f5ab13dacf80" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547530 5081 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-auditor" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547538 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="681f8c15-9cbf-4416-83c4-36429c38a18d" containerName="barbican-keystone-listener-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547549 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c7a9174-2a15-43dd-8ef7-ce9dd7b040a5" containerName="mariadb-account-delete" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547580 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547593 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c619e-3cb7-417e-8192-9d13725cdde5" containerName="nova-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547604 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="264449b6-a64d-4d0b-a465-616fa49b3eca" containerName="neutron-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547622 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd5af114-d170-46ce-8a46-ec0b65ddb545" containerName="barbican-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547646 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64376f5-c10d-45cb-a9eb-81d9a4cd8b19" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547665 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547676 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547687 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5344c615-93ae-4a4a-95b1-3bbe3327f42e" containerName="rabbitmq" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547696 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-updater" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547705 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6f1aa08-fd4f-4d96-a7e5-2862a41b5bc5" containerName="galera" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547713 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="254b0c39-e3af-4a48-a954-5ff334d36670" containerName="keystone-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547721 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6547ead1-44e3-45f3-a668-fff64776f1f6" containerName="cinder-api" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547733 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="account-reaper" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547743 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbf652f-a193-47f7-872b-e9864a40cd0a" containerName="barbican-worker-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547754 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4791d8d2-2a2a-4595-8678-10ec383956f0" containerName="object-replicator" Oct 03 15:53:57 crc 
kubenswrapper[5081]: I1003 15:53:57.547765 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8341a6f-4783-4bf6-916e-ac655208ba45" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.547773 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed01e164-93b5-47c7-9ec0-7a00d6942c66" containerName="glance-log" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.548987 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.555282 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.644436 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.644487 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-495p7\" (UniqueName: \"kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.644528 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.745865 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.745991 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.746013 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-495p7\" (UniqueName: \"kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.746577 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " 
pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.746747 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.768034 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-495p7\" (UniqueName: \"kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7\") pod \"certified-operators-xs59b\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:57 crc kubenswrapper[5081]: I1003 15:53:57.866589 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:53:58 crc kubenswrapper[5081]: I1003 15:53:58.349914 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:53:58 crc kubenswrapper[5081]: I1003 15:53:58.802303 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerStarted","Data":"e2231f5442e6a5eb017eacb5fddd1c3f3acc806f22275af7fd9a987b18ef8574"} Oct 03 15:53:59 crc kubenswrapper[5081]: I1003 15:53:59.811977 5081 generic.go:334] "Generic (PLEG): container finished" podID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerID="df5dfc479afb0a137d954f3a59ee357c2ced3849423e9157434854310d5b9b70" exitCode=0 Oct 03 15:53:59 crc kubenswrapper[5081]: I1003 15:53:59.812050 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerDied","Data":"df5dfc479afb0a137d954f3a59ee357c2ced3849423e9157434854310d5b9b70"} Oct 03 15:54:00 crc kubenswrapper[5081]: I1003 15:54:00.647017 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 15:54:00 crc kubenswrapper[5081]: I1003 15:54:00.647073 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 15:54:00 crc kubenswrapper[5081]: I1003 15:54:00.647112 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 15:54:00 crc kubenswrapper[5081]: I1003 15:54:00.647629 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 15:54:00 crc kubenswrapper[5081]: I1003 15:54:00.647680 5081 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" gracePeriod=600 Oct 03 15:54:01 crc kubenswrapper[5081]: E1003 15:54:01.347223 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:54:01 crc kubenswrapper[5081]: I1003 15:54:01.828874 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" exitCode=0 Oct 03 15:54:01 crc kubenswrapper[5081]: I1003 15:54:01.837022 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87"} Oct 03 15:54:01 crc kubenswrapper[5081]: I1003 15:54:01.837095 5081 scope.go:117] "RemoveContainer" containerID="1f5f972a5d9ee4f18b25fc20005ef3fc5efd95afd61322fe1547661c45157b16" Oct 03 15:54:01 crc kubenswrapper[5081]: I1003 15:54:01.837724 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:54:01 crc kubenswrapper[5081]: E1003 15:54:01.838028 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:54:02 crc kubenswrapper[5081]: I1003 15:54:02.836941 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerStarted","Data":"f4344a7f0dd588ae53d22cede8c5b7cf907eeddd1a7065a90b5590790de1308a"} Oct 03 15:54:03 crc kubenswrapper[5081]: I1003 15:54:03.849939 5081 generic.go:334] "Generic (PLEG): container finished" podID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerID="f4344a7f0dd588ae53d22cede8c5b7cf907eeddd1a7065a90b5590790de1308a" exitCode=0 Oct 03 15:54:03 crc kubenswrapper[5081]: I1003 15:54:03.849990 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerDied","Data":"f4344a7f0dd588ae53d22cede8c5b7cf907eeddd1a7065a90b5590790de1308a"} Oct 03 15:54:06 crc kubenswrapper[5081]: I1003 15:54:06.882193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerStarted","Data":"d77e4b21e1a2049d94458138b494d3255d9c17d74969cae2385a6cddc9621805"} Oct 03 15:54:06 crc kubenswrapper[5081]: I1003 15:54:06.901464 5081 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xs59b" podStartSLOduration=3.5793357180000003 podStartE2EDuration="9.901446739s" podCreationTimestamp="2025-10-03 15:53:57 +0000 UTC" firstStartedPulling="2025-10-03 15:53:59.813713881 +0000 UTC m=+1558.779270494" lastFinishedPulling="2025-10-03 15:54:06.135824902 +0000 UTC m=+1565.101381515" observedRunningTime="2025-10-03 15:54:06.8976111 +0000 UTC m=+1565.863167753" watchObservedRunningTime="2025-10-03 15:54:06.901446739 +0000 UTC m=+1565.867003372" Oct 03 15:54:07 crc kubenswrapper[5081]: I1003 15:54:07.866904 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:07 crc kubenswrapper[5081]: I1003 15:54:07.866952 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:07 crc kubenswrapper[5081]: I1003 15:54:07.923397 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:13 crc kubenswrapper[5081]: I1003 15:54:13.827854 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:54:13 crc kubenswrapper[5081]: E1003 15:54:13.828833 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:54:17 crc kubenswrapper[5081]: I1003 15:54:17.909910 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:17 crc kubenswrapper[5081]: I1003 15:54:17.955360 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:54:17 crc kubenswrapper[5081]: I1003 15:54:17.978620 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xs59b" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="registry-server" containerID="cri-o://d77e4b21e1a2049d94458138b494d3255d9c17d74969cae2385a6cddc9621805" gracePeriod=2 Oct 03 15:54:18 crc kubenswrapper[5081]: I1003 15:54:18.835834 5081 scope.go:117] "RemoveContainer" containerID="973a2dd03a9634033d615aa840a8203393ad6ba90b38c0ded957c070bd030e3f" Oct 03 15:54:18 crc kubenswrapper[5081]: I1003 15:54:18.975867 5081 scope.go:117] "RemoveContainer" containerID="f0dfbbe4c789c475a56039d3e5f56d642c183a192ba50df8037c1df1832128b4" Oct 03 15:54:19 crc kubenswrapper[5081]: I1003 15:54:19.009476 5081 scope.go:117] "RemoveContainer" containerID="190db332aa9ce963771e5f783cab24ab8b498b1573d76b02416354a661063d09" Oct 03 15:54:19 crc kubenswrapper[5081]: I1003 15:54:19.035262 5081 scope.go:117] "RemoveContainer" containerID="e9fbb481a454be536da4437d1b8adf8cc0b7c83a3173c9f8e7e021d1e234cb4c" Oct 03 15:54:19 crc kubenswrapper[5081]: I1003 15:54:19.067935 5081 scope.go:117] "RemoveContainer" containerID="093d59a50f7e6e5e82203965751ceb33aba255b5c54f027b9252d29af23a92c6" Oct 03 15:54:19 crc kubenswrapper[5081]: I1003 15:54:19.099363 5081 scope.go:117] "RemoveContainer" 
containerID="e23f33677c2414ff5f3fb51da521549962794cb0588c742db6be3c0002ff4347" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.001527 5081 generic.go:334] "Generic (PLEG): container finished" podID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerID="d77e4b21e1a2049d94458138b494d3255d9c17d74969cae2385a6cddc9621805" exitCode=0 Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.001585 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerDied","Data":"d77e4b21e1a2049d94458138b494d3255d9c17d74969cae2385a6cddc9621805"} Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.273520 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.451500 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content\") pod \"2bb3160e-241c-4690-a8a9-dffcf654f195\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.451552 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities\") pod \"2bb3160e-241c-4690-a8a9-dffcf654f195\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.451656 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-495p7\" (UniqueName: \"kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7\") pod \"2bb3160e-241c-4690-a8a9-dffcf654f195\" (UID: \"2bb3160e-241c-4690-a8a9-dffcf654f195\") " Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.452944 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities" (OuterVolumeSpecName: "utilities") pod "2bb3160e-241c-4690-a8a9-dffcf654f195" (UID: "2bb3160e-241c-4690-a8a9-dffcf654f195"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.459343 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7" (OuterVolumeSpecName: "kube-api-access-495p7") pod "2bb3160e-241c-4690-a8a9-dffcf654f195" (UID: "2bb3160e-241c-4690-a8a9-dffcf654f195"). InnerVolumeSpecName "kube-api-access-495p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.506780 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2bb3160e-241c-4690-a8a9-dffcf654f195" (UID: "2bb3160e-241c-4690-a8a9-dffcf654f195"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.553635 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-495p7\" (UniqueName: \"kubernetes.io/projected/2bb3160e-241c-4690-a8a9-dffcf654f195-kube-api-access-495p7\") on node \"crc\" DevicePath \"\"" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.554120 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 15:54:20 crc kubenswrapper[5081]: I1003 15:54:20.554139 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bb3160e-241c-4690-a8a9-dffcf654f195-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.030107 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xs59b" event={"ID":"2bb3160e-241c-4690-a8a9-dffcf654f195","Type":"ContainerDied","Data":"e2231f5442e6a5eb017eacb5fddd1c3f3acc806f22275af7fd9a987b18ef8574"} Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.030170 5081 scope.go:117] "RemoveContainer" containerID="d77e4b21e1a2049d94458138b494d3255d9c17d74969cae2385a6cddc9621805" Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.030172 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xs59b" Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.072354 5081 scope.go:117] "RemoveContainer" containerID="f4344a7f0dd588ae53d22cede8c5b7cf907eeddd1a7065a90b5590790de1308a" Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.084425 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.095031 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xs59b"] Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.108309 5081 scope.go:117] "RemoveContainer" containerID="df5dfc479afb0a137d954f3a59ee357c2ced3849423e9157434854310d5b9b70" Oct 03 15:54:21 crc kubenswrapper[5081]: I1003 15:54:21.847164 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" path="/var/lib/kubelet/pods/2bb3160e-241c-4690-a8a9-dffcf654f195/volumes" Oct 03 15:54:24 crc kubenswrapper[5081]: I1003 15:54:24.827817 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:54:24 crc kubenswrapper[5081]: E1003 15:54:24.828779 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:54:36 crc kubenswrapper[5081]: I1003 15:54:36.827328 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:54:36 crc kubenswrapper[5081]: E1003 15:54:36.828642 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:54:50 crc kubenswrapper[5081]: I1003 15:54:50.828046 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:54:50 crc kubenswrapper[5081]: E1003 15:54:50.828672 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:55:04 crc kubenswrapper[5081]: I1003 15:55:04.827740 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:55:04 crc kubenswrapper[5081]: E1003 15:55:04.828714 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:55:18 crc kubenswrapper[5081]: I1003 15:55:18.827880 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:55:18 crc kubenswrapper[5081]: E1003 15:55:18.828671 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.438501 5081 scope.go:117] "RemoveContainer" containerID="094774f80b389d20b60c03d34539966beac21be622dcb0337f5f4ccb2984989b" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.463997 5081 scope.go:117] "RemoveContainer" containerID="c9113f6ae45f47c36f72929f9ef5d16930dbce5fe4eaf6bc4e6ccb31c7482d8a" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.491793 5081 scope.go:117] "RemoveContainer" containerID="37c70bdce53077b7f4007fd4f9b0dd5d2ebb6fc02349ab8a99dcef4b5e632492" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.514509 5081 scope.go:117] "RemoveContainer" containerID="84123b425f08e68c1e7ab4696972c2e9b88d67c960a2818e57a2224785f30145" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.559073 5081 scope.go:117] "RemoveContainer" containerID="19c786c1e99f549fa8a5118fc3a61426531da6442d4e1c883048e996969dd30a" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.577481 5081 scope.go:117] "RemoveContainer" containerID="683a9cea3704f28dee554ad7d0a5fd46617595e708f08884aeeafa501eeeb131" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.604189 5081 scope.go:117] "RemoveContainer" 
containerID="fa10789b894f70b4d3616f3383ce75ef1f96a89dfb2da225a13e895d6558e0e9" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.632105 5081 scope.go:117] "RemoveContainer" containerID="3c45fe872da14002f3ab7d1037353971c602d0ceb557b052309704e9c49f8757" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.661264 5081 scope.go:117] "RemoveContainer" containerID="5667e13010e026cffae93f1f48fe2279663fa544702ab46b085a5c829713ef57" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.706905 5081 scope.go:117] "RemoveContainer" containerID="48a09e9ca64de36546e072756a97d761a030cb3e742c290692500230d25e59fd" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.747951 5081 scope.go:117] "RemoveContainer" containerID="d4899cf45c8e57fa395d11224d614fa21d3883f4753e27ab3a856ddce378f669" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.766664 5081 scope.go:117] "RemoveContainer" containerID="3234f43ec5e5008c4467212fab4fb164ae01868c28d804e4bf4b3e6bb71a6336" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.786318 5081 scope.go:117] "RemoveContainer" containerID="46324de20afe49419bacc89b855b20626c4259348867df225ac35af6b0ff3964" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.807174 5081 scope.go:117] "RemoveContainer" containerID="8607971b18cfb9e06089269e699a5ef2143d8a8931505756f195f8bf0fd0715f" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.826024 5081 scope.go:117] "RemoveContainer" containerID="67f277b31611a80868ec8ef47b063a503f8912117172f638b0d316cb15085922" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.858225 5081 scope.go:117] "RemoveContainer" containerID="7bb2c365bb5e50083ae986c7ff2d6a13940d0776ecb674a83c5e43e51215a427" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.902776 5081 scope.go:117] "RemoveContainer" containerID="139b61155eb21f18f967d59dc57cb8879b6e1eb054f36621551ecc1cecdc4357" Oct 03 15:55:19 crc kubenswrapper[5081]: I1003 15:55:19.942853 5081 scope.go:117] "RemoveContainer" containerID="e4534c113168e11113fe38107a8f500f4417dbbb179d3846aa9fce87a53a18d2" Oct 03 15:55:33 crc kubenswrapper[5081]: I1003 15:55:33.828503 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:55:33 crc kubenswrapper[5081]: E1003 15:55:33.829265 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:55:48 crc kubenswrapper[5081]: I1003 15:55:48.828042 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:55:48 crc kubenswrapper[5081]: E1003 15:55:48.828782 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:56:03 crc kubenswrapper[5081]: I1003 15:56:03.830874 5081 scope.go:117] "RemoveContainer" 
containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:56:03 crc kubenswrapper[5081]: E1003 15:56:03.832039 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:56:15 crc kubenswrapper[5081]: I1003 15:56:15.827798 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:56:15 crc kubenswrapper[5081]: E1003 15:56:15.828491 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.252226 5081 scope.go:117] "RemoveContainer" containerID="43de74b5485509f34e4471be5f662ab53707b407c18cca90d50f8b325a9af06d" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.308955 5081 scope.go:117] "RemoveContainer" containerID="5edccb45492f37a61a761bda119802fecb308fa81cec1b916185a47045d64830" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.331962 5081 scope.go:117] "RemoveContainer" containerID="18db5cd9adfb101e2f6a73d479aac7968e4fd25e0bc69fc94066119176608f62" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.359703 5081 scope.go:117] "RemoveContainer" containerID="68e340c1501e9dc43aadc37d497b3228a7433d115bf05a6a47d62aedac45c029" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.378453 5081 scope.go:117] "RemoveContainer" containerID="34c7f45d2c9cb180d7c08a560ddcfcf95e23f3fa8b56979e4ea9e41b424f888b" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.405076 5081 scope.go:117] "RemoveContainer" containerID="11222103517d49608c40c5db5a80c4fb0e6936aa3b9146f78928edb3436b5c6e" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.422473 5081 scope.go:117] "RemoveContainer" containerID="9791a255062a856210f6f6d869def3d35e782763a70a0b45636ea36481f99787" Oct 03 15:56:20 crc kubenswrapper[5081]: I1003 15:56:20.439037 5081 scope.go:117] "RemoveContainer" containerID="86505af40b69e0e0b9847b07e4658ae402a0c4c37caa4072e1ee5ae9768dc0f6" Oct 03 15:56:30 crc kubenswrapper[5081]: I1003 15:56:30.827502 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:56:30 crc kubenswrapper[5081]: E1003 15:56:30.828421 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:56:45 crc kubenswrapper[5081]: I1003 15:56:45.827496 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:56:45 crc 
kubenswrapper[5081]: E1003 15:56:45.828128 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:56:59 crc kubenswrapper[5081]: I1003 15:56:59.827633 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:56:59 crc kubenswrapper[5081]: E1003 15:56:59.828391 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:57:14 crc kubenswrapper[5081]: I1003 15:57:14.827513 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:57:14 crc kubenswrapper[5081]: E1003 15:57:14.828326 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.537671 5081 scope.go:117] "RemoveContainer" containerID="2742fc690da949e9d6cb940b865585ee6b2e33a45f3f8c6d67539be2bb0ca78b" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.579901 5081 scope.go:117] "RemoveContainer" containerID="03a0d5e5f0d7d2e3256428e9dfbde99cc9dffb0c85d0c20febe5d5a351f19d27" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.621700 5081 scope.go:117] "RemoveContainer" containerID="d76846d0dd605e9d375a82084d0b21e2838e242828180e38a07f9443254f2aa1" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.655840 5081 scope.go:117] "RemoveContainer" containerID="83d1bd011ea30374ecdb406909cb245479e6b28e66c4b2423fdfa0f4b32232e7" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.671836 5081 scope.go:117] "RemoveContainer" containerID="bdd48064d11b64a5aebc974e14c1981a110b42c382510f2c405ff834f90068db" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.695877 5081 scope.go:117] "RemoveContainer" containerID="8efa7cfbd3c0d58f4e81f012bf8245d9e47c0f18b0af8a5a13e7f5e6027ad8a3" Oct 03 15:57:20 crc kubenswrapper[5081]: I1003 15:57:20.713685 5081 scope.go:117] "RemoveContainer" containerID="73a7229a6dda8683762ca148305b6ad3a681e613e0f9cdd6e8995d83174cb3b3" Oct 03 15:57:26 crc kubenswrapper[5081]: I1003 15:57:26.827356 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:57:26 crc kubenswrapper[5081]: E1003 15:57:26.828167 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:57:41 crc kubenswrapper[5081]: I1003 15:57:41.833146 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:57:41 crc kubenswrapper[5081]: E1003 15:57:41.833835 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:57:52 crc kubenswrapper[5081]: I1003 15:57:52.827147 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:57:52 crc kubenswrapper[5081]: E1003 15:57:52.828048 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:58:07 crc kubenswrapper[5081]: I1003 15:58:07.828119 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:58:07 crc kubenswrapper[5081]: E1003 15:58:07.828874 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:58:20 crc kubenswrapper[5081]: I1003 15:58:20.825680 5081 scope.go:117] "RemoveContainer" containerID="97c93f28e63efe77b049abc94e9494220f89aeb5a45b0d485d1782988daeb773" Oct 03 15:58:20 crc kubenswrapper[5081]: I1003 15:58:20.848105 5081 scope.go:117] "RemoveContainer" containerID="16c0bbdb9a05648233faf864130036eaa9a7791b3b9c884c02732899ba53da61" Oct 03 15:58:20 crc kubenswrapper[5081]: I1003 15:58:20.900783 5081 scope.go:117] "RemoveContainer" containerID="acb498f39e09a0e5daf720cd0a2e76f5299382935696927f7ec5b9dc290467a4" Oct 03 15:58:20 crc kubenswrapper[5081]: I1003 15:58:20.915918 5081 scope.go:117] "RemoveContainer" containerID="f24a2e28a7c0ad1349c328c219041c0c090d938313a1795249776310239401b8" Oct 03 15:58:20 crc kubenswrapper[5081]: I1003 15:58:20.932556 5081 scope.go:117] "RemoveContainer" containerID="375aa21783d8942e6463ead5ec2f108cd31f251a593b6147df3ae5e0bcca62a7" Oct 03 15:58:21 crc kubenswrapper[5081]: I1003 15:58:21.833624 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:58:21 crc kubenswrapper[5081]: E1003 15:58:21.833972 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:58:35 crc kubenswrapper[5081]: I1003 15:58:35.827312 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:58:35 crc kubenswrapper[5081]: E1003 15:58:35.828117 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:58:47 crc kubenswrapper[5081]: I1003 15:58:47.827819 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:58:47 crc kubenswrapper[5081]: E1003 15:58:47.829162 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 15:59:01 crc kubenswrapper[5081]: I1003 15:59:01.832766 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 15:59:02 crc kubenswrapper[5081]: I1003 15:59:02.225857 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c"} Oct 03 15:59:21 crc kubenswrapper[5081]: I1003 15:59:21.014394 5081 scope.go:117] "RemoveContainer" containerID="644e9ebb368a3aa467dad7915d9d3f3bbe1d1f79dd2a2f5ca01718aad4eb4385" Oct 03 15:59:21 crc kubenswrapper[5081]: I1003 15:59:21.042498 5081 scope.go:117] "RemoveContainer" containerID="1e5cd96417e6671023463d9a740c00e60c021e2e1be7b522e4ffbdd84d119b70" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.144848 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7"] Oct 03 16:00:00 crc kubenswrapper[5081]: E1003 16:00:00.145881 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="extract-content" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.145901 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="extract-content" Oct 03 16:00:00 crc kubenswrapper[5081]: E1003 16:00:00.145924 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="registry-server" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.145930 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="registry-server" Oct 03 16:00:00 crc kubenswrapper[5081]: E1003 16:00:00.145940 5081 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="extract-utilities" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.145947 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="extract-utilities" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.146121 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bb3160e-241c-4690-a8a9-dffcf654f195" containerName="registry-server" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.146644 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.149135 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.149391 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.153435 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7"] Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.273998 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44nt7\" (UniqueName: \"kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.274095 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.274186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.375853 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44nt7\" (UniqueName: \"kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.375948 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: 
I1003 16:00:00.376044 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.377230 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.386147 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.400181 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44nt7\" (UniqueName: \"kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7\") pod \"collect-profiles-29325120-qbhb7\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.464408 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:00 crc kubenswrapper[5081]: I1003 16:00:00.889171 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7"] Oct 03 16:00:01 crc kubenswrapper[5081]: I1003 16:00:01.628530 5081 generic.go:334] "Generic (PLEG): container finished" podID="5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" containerID="64a300e770e0aed506cbe308b2d72bbff77ce3372ec74048e0feff3a43e9e905" exitCode=0 Oct 03 16:00:01 crc kubenswrapper[5081]: I1003 16:00:01.628611 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" event={"ID":"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3","Type":"ContainerDied","Data":"64a300e770e0aed506cbe308b2d72bbff77ce3372ec74048e0feff3a43e9e905"} Oct 03 16:00:01 crc kubenswrapper[5081]: I1003 16:00:01.629969 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" event={"ID":"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3","Type":"ContainerStarted","Data":"d42c88ab86c13cac0fbbe922b7750c10b5035aa623c3464e5160eb033d664ac8"} Oct 03 16:00:02 crc kubenswrapper[5081]: I1003 16:00:02.878798 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.013305 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44nt7\" (UniqueName: \"kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7\") pod \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.013372 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume\") pod \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.013517 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume\") pod \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\" (UID: \"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3\") " Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.014217 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume" (OuterVolumeSpecName: "config-volume") pod "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" (UID: "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.018924 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" (UID: "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.024123 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7" (OuterVolumeSpecName: "kube-api-access-44nt7") pod "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" (UID: "5c2eea03-99be-4909-b9e9-a2e6dbbc28d3"). InnerVolumeSpecName "kube-api-access-44nt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.115314 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.115358 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44nt7\" (UniqueName: \"kubernetes.io/projected/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-kube-api-access-44nt7\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.115368 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.647084 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" event={"ID":"5c2eea03-99be-4909-b9e9-a2e6dbbc28d3","Type":"ContainerDied","Data":"d42c88ab86c13cac0fbbe922b7750c10b5035aa623c3464e5160eb033d664ac8"} Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.647126 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d42c88ab86c13cac0fbbe922b7750c10b5035aa623c3464e5160eb033d664ac8" Oct 03 16:00:03 crc kubenswrapper[5081]: I1003 16:00:03.647156 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.152717 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:23 crc kubenswrapper[5081]: E1003 16:00:23.154293 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" containerName="collect-profiles" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.154330 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" containerName="collect-profiles" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.154522 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" containerName="collect-profiles" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.155891 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.159535 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.184990 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.185051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4b89\" (UniqueName: \"kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.185186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.286128 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.286182 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.286204 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4b89\" (UniqueName: \"kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.286748 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.286940 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.310600 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-p4b89\" (UniqueName: \"kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89\") pod \"redhat-marketplace-kg4cf\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.474058 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:23 crc kubenswrapper[5081]: I1003 16:00:23.870738 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:24 crc kubenswrapper[5081]: I1003 16:00:24.793475 5081 generic.go:334] "Generic (PLEG): container finished" podID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerID="a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb" exitCode=0 Oct 03 16:00:24 crc kubenswrapper[5081]: I1003 16:00:24.793548 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerDied","Data":"a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb"} Oct 03 16:00:24 crc kubenswrapper[5081]: I1003 16:00:24.793857 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerStarted","Data":"82f104cc2371cb49fd46d57ce6984da5495f56308e4c0809b5850cea8e055502"} Oct 03 16:00:24 crc kubenswrapper[5081]: I1003 16:00:24.796358 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:00:25 crc kubenswrapper[5081]: I1003 16:00:25.802999 5081 generic.go:334] "Generic (PLEG): container finished" podID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerID="8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044" exitCode=0 Oct 03 16:00:25 crc kubenswrapper[5081]: I1003 16:00:25.803113 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerDied","Data":"8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044"} Oct 03 16:00:26 crc kubenswrapper[5081]: I1003 16:00:26.811142 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerStarted","Data":"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4"} Oct 03 16:00:26 crc kubenswrapper[5081]: I1003 16:00:26.830319 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kg4cf" podStartSLOduration=2.152052616 podStartE2EDuration="3.830301404s" podCreationTimestamp="2025-10-03 16:00:23 +0000 UTC" firstStartedPulling="2025-10-03 16:00:24.796078397 +0000 UTC m=+1943.761635010" lastFinishedPulling="2025-10-03 16:00:26.474327195 +0000 UTC m=+1945.439883798" observedRunningTime="2025-10-03 16:00:26.826345 +0000 UTC m=+1945.791901643" watchObservedRunningTime="2025-10-03 16:00:26.830301404 +0000 UTC m=+1945.795858007" Oct 03 16:00:33 crc kubenswrapper[5081]: I1003 16:00:33.474780 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:33 crc kubenswrapper[5081]: I1003 16:00:33.475351 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:33 crc kubenswrapper[5081]: I1003 16:00:33.568773 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:33 crc kubenswrapper[5081]: I1003 16:00:33.903063 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:33 crc kubenswrapper[5081]: I1003 16:00:33.950874 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:35 crc kubenswrapper[5081]: I1003 16:00:35.877137 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kg4cf" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="registry-server" containerID="cri-o://9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4" gracePeriod=2 Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.355417 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.366669 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4b89\" (UniqueName: \"kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89\") pod \"de35ed63-f204-4e0b-8070-316a21c99a9b\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.366822 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities\") pod \"de35ed63-f204-4e0b-8070-316a21c99a9b\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.366848 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content\") pod \"de35ed63-f204-4e0b-8070-316a21c99a9b\" (UID: \"de35ed63-f204-4e0b-8070-316a21c99a9b\") " Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.368327 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities" (OuterVolumeSpecName: "utilities") pod "de35ed63-f204-4e0b-8070-316a21c99a9b" (UID: "de35ed63-f204-4e0b-8070-316a21c99a9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.372718 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89" (OuterVolumeSpecName: "kube-api-access-p4b89") pod "de35ed63-f204-4e0b-8070-316a21c99a9b" (UID: "de35ed63-f204-4e0b-8070-316a21c99a9b"). InnerVolumeSpecName "kube-api-access-p4b89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.384109 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de35ed63-f204-4e0b-8070-316a21c99a9b" (UID: "de35ed63-f204-4e0b-8070-316a21c99a9b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.467937 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.467974 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de35ed63-f204-4e0b-8070-316a21c99a9b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.467984 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4b89\" (UniqueName: \"kubernetes.io/projected/de35ed63-f204-4e0b-8070-316a21c99a9b-kube-api-access-p4b89\") on node \"crc\" DevicePath \"\"" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.884702 5081 generic.go:334] "Generic (PLEG): container finished" podID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerID="9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4" exitCode=0 Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.884752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerDied","Data":"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4"} Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.884779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg4cf" event={"ID":"de35ed63-f204-4e0b-8070-316a21c99a9b","Type":"ContainerDied","Data":"82f104cc2371cb49fd46d57ce6984da5495f56308e4c0809b5850cea8e055502"} Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.884795 5081 scope.go:117] "RemoveContainer" containerID="9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.884754 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg4cf" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.905203 5081 scope.go:117] "RemoveContainer" containerID="8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.923210 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.935386 5081 scope.go:117] "RemoveContainer" containerID="a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.935957 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg4cf"] Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.962776 5081 scope.go:117] "RemoveContainer" containerID="9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4" Oct 03 16:00:36 crc kubenswrapper[5081]: E1003 16:00:36.963823 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4\": container with ID starting with 9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4 not found: ID does not exist" containerID="9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.963854 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4"} err="failed to get container status \"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4\": rpc error: code = NotFound desc = could not find container \"9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4\": container with ID starting with 9b7c972b47a7c43e32777219143568f73f89096607052b6cf2a89549d3b55dd4 not found: ID does not exist" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.963931 5081 scope.go:117] "RemoveContainer" containerID="8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044" Oct 03 16:00:36 crc kubenswrapper[5081]: E1003 16:00:36.964291 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044\": container with ID starting with 8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044 not found: ID does not exist" containerID="8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.964363 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044"} err="failed to get container status \"8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044\": rpc error: code = NotFound desc = could not find container \"8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044\": container with ID starting with 8bacfb95bdfb28b916f08317e471ecc357d97608888a23aa0ae20f1fb7a6f044 not found: ID does not exist" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.964403 5081 scope.go:117] "RemoveContainer" containerID="a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb" Oct 03 16:00:36 crc kubenswrapper[5081]: E1003 16:00:36.964740 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb\": container with ID starting with a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb not found: ID does not exist" containerID="a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb" Oct 03 16:00:36 crc kubenswrapper[5081]: I1003 16:00:36.964763 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb"} err="failed to get container status \"a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb\": rpc error: code = NotFound desc = could not find container \"a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb\": container with ID starting with a10d6bd7ce9c688d55428df0803af1d7263daf3577dd31c921d768cf86811feb not found: ID does not exist" Oct 03 16:00:37 crc kubenswrapper[5081]: I1003 16:00:37.836313 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" path="/var/lib/kubelet/pods/de35ed63-f204-4e0b-8070-316a21c99a9b/volumes" Oct 03 16:01:30 crc kubenswrapper[5081]: I1003 16:01:30.647434 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:01:30 crc kubenswrapper[5081]: I1003 16:01:30.648010 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:02:00 crc kubenswrapper[5081]: I1003 16:02:00.648198 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:02:00 crc kubenswrapper[5081]: I1003 16:02:00.648785 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:02:30 crc kubenswrapper[5081]: I1003 16:02:30.647128 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:02:30 crc kubenswrapper[5081]: I1003 16:02:30.647700 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:02:30 crc kubenswrapper[5081]: I1003 16:02:30.647752 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:02:30 crc kubenswrapper[5081]: I1003 16:02:30.648341 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:02:30 crc kubenswrapper[5081]: I1003 16:02:30.648408 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c" gracePeriod=600 Oct 03 16:02:31 crc kubenswrapper[5081]: I1003 16:02:31.644491 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c" exitCode=0 Oct 03 16:02:31 crc kubenswrapper[5081]: I1003 16:02:31.644595 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c"} Oct 03 16:02:31 crc kubenswrapper[5081]: I1003 16:02:31.644898 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a"} Oct 03 16:02:31 crc kubenswrapper[5081]: I1003 16:02:31.644926 5081 scope.go:117] "RemoveContainer" containerID="9216e0686b03f547cc4b236b3a8abccd18aead9c8b56555feba6735ff5c37d87" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.041894 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:04 crc kubenswrapper[5081]: E1003 16:03:04.043175 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="extract-utilities" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.043196 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="extract-utilities" Oct 03 16:03:04 crc kubenswrapper[5081]: E1003 16:03:04.043213 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="registry-server" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.043220 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="registry-server" Oct 03 16:03:04 crc kubenswrapper[5081]: E1003 16:03:04.043234 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="extract-content" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.043241 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="extract-content" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.043421 5081 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="de35ed63-f204-4e0b-8070-316a21c99a9b" containerName="registry-server" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.044922 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.049117 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.171118 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.171337 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.171397 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qps89\" (UniqueName: \"kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.272479 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qps89\" (UniqueName: \"kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.272592 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.272676 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.273235 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.273286 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content\") pod \"community-operators-pm5k7\" (UID: 
\"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.302744 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qps89\" (UniqueName: \"kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89\") pod \"community-operators-pm5k7\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.374725 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.841184 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:04 crc kubenswrapper[5081]: I1003 16:03:04.880889 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerStarted","Data":"93528dd2f597f1e8e879d78faa23e23d4b354d699da9e762ffcd19f0d35d6ddf"} Oct 03 16:03:05 crc kubenswrapper[5081]: I1003 16:03:05.888973 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcc51-55d4-472e-a936-442252ac481d" containerID="dd1ba79b3dd39a0b3e5e1a66fe828a5835fce1ae1c7582f957204112b1079f5b" exitCode=0 Oct 03 16:03:05 crc kubenswrapper[5081]: I1003 16:03:05.889211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerDied","Data":"dd1ba79b3dd39a0b3e5e1a66fe828a5835fce1ae1c7582f957204112b1079f5b"} Oct 03 16:03:06 crc kubenswrapper[5081]: I1003 16:03:06.897454 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcc51-55d4-472e-a936-442252ac481d" containerID="38005ddc4dfb650ef08157a60d367b1fa7ea8358682a5d62a7490ffdec47393d" exitCode=0 Oct 03 16:03:06 crc kubenswrapper[5081]: I1003 16:03:06.897549 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerDied","Data":"38005ddc4dfb650ef08157a60d367b1fa7ea8358682a5d62a7490ffdec47393d"} Oct 03 16:03:07 crc kubenswrapper[5081]: I1003 16:03:07.907541 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerStarted","Data":"22a8cdd7685d7c553ce44cd1b4abf987883369e5001fc37fb8ec47d48ca56897"} Oct 03 16:03:07 crc kubenswrapper[5081]: I1003 16:03:07.940893 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pm5k7" podStartSLOduration=2.359281271 podStartE2EDuration="3.940878471s" podCreationTimestamp="2025-10-03 16:03:04 +0000 UTC" firstStartedPulling="2025-10-03 16:03:05.891159398 +0000 UTC m=+2104.856716011" lastFinishedPulling="2025-10-03 16:03:07.472756598 +0000 UTC m=+2106.438313211" observedRunningTime="2025-10-03 16:03:07.937056961 +0000 UTC m=+2106.902613574" watchObservedRunningTime="2025-10-03 16:03:07.940878471 +0000 UTC m=+2106.906435084" Oct 03 16:03:14 crc kubenswrapper[5081]: I1003 16:03:14.375792 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:14 crc kubenswrapper[5081]: I1003 
16:03:14.376204 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:14 crc kubenswrapper[5081]: I1003 16:03:14.419608 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:15 crc kubenswrapper[5081]: I1003 16:03:15.002997 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:15 crc kubenswrapper[5081]: I1003 16:03:15.052096 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:16 crc kubenswrapper[5081]: I1003 16:03:16.973239 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pm5k7" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="registry-server" containerID="cri-o://22a8cdd7685d7c553ce44cd1b4abf987883369e5001fc37fb8ec47d48ca56897" gracePeriod=2 Oct 03 16:03:17 crc kubenswrapper[5081]: I1003 16:03:17.985281 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcc51-55d4-472e-a936-442252ac481d" containerID="22a8cdd7685d7c553ce44cd1b4abf987883369e5001fc37fb8ec47d48ca56897" exitCode=0 Oct 03 16:03:17 crc kubenswrapper[5081]: I1003 16:03:17.985391 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerDied","Data":"22a8cdd7685d7c553ce44cd1b4abf987883369e5001fc37fb8ec47d48ca56897"} Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.065312 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.183858 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content\") pod \"04ebcc51-55d4-472e-a936-442252ac481d\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.184019 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qps89\" (UniqueName: \"kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89\") pod \"04ebcc51-55d4-472e-a936-442252ac481d\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.184075 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities\") pod \"04ebcc51-55d4-472e-a936-442252ac481d\" (UID: \"04ebcc51-55d4-472e-a936-442252ac481d\") " Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.184989 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities" (OuterVolumeSpecName: "utilities") pod "04ebcc51-55d4-472e-a936-442252ac481d" (UID: "04ebcc51-55d4-472e-a936-442252ac481d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.190055 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89" (OuterVolumeSpecName: "kube-api-access-qps89") pod "04ebcc51-55d4-472e-a936-442252ac481d" (UID: "04ebcc51-55d4-472e-a936-442252ac481d"). InnerVolumeSpecName "kube-api-access-qps89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.231300 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04ebcc51-55d4-472e-a936-442252ac481d" (UID: "04ebcc51-55d4-472e-a936-442252ac481d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.285819 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.285876 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcc51-55d4-472e-a936-442252ac481d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.285898 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qps89\" (UniqueName: \"kubernetes.io/projected/04ebcc51-55d4-472e-a936-442252ac481d-kube-api-access-qps89\") on node \"crc\" DevicePath \"\"" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.993777 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pm5k7" event={"ID":"04ebcc51-55d4-472e-a936-442252ac481d","Type":"ContainerDied","Data":"93528dd2f597f1e8e879d78faa23e23d4b354d699da9e762ffcd19f0d35d6ddf"} Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.993826 5081 scope.go:117] "RemoveContainer" containerID="22a8cdd7685d7c553ce44cd1b4abf987883369e5001fc37fb8ec47d48ca56897" Oct 03 16:03:18 crc kubenswrapper[5081]: I1003 16:03:18.993937 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pm5k7" Oct 03 16:03:19 crc kubenswrapper[5081]: I1003 16:03:19.020065 5081 scope.go:117] "RemoveContainer" containerID="38005ddc4dfb650ef08157a60d367b1fa7ea8358682a5d62a7490ffdec47393d" Oct 03 16:03:19 crc kubenswrapper[5081]: I1003 16:03:19.030045 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:19 crc kubenswrapper[5081]: I1003 16:03:19.039026 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pm5k7"] Oct 03 16:03:19 crc kubenswrapper[5081]: I1003 16:03:19.043626 5081 scope.go:117] "RemoveContainer" containerID="dd1ba79b3dd39a0b3e5e1a66fe828a5835fce1ae1c7582f957204112b1079f5b" Oct 03 16:03:19 crc kubenswrapper[5081]: I1003 16:03:19.838542 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ebcc51-55d4-472e-a936-442252ac481d" path="/var/lib/kubelet/pods/04ebcc51-55d4-472e-a936-442252ac481d/volumes" Oct 03 16:04:30 crc kubenswrapper[5081]: I1003 16:04:30.647093 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:04:30 crc kubenswrapper[5081]: I1003 16:04:30.647661 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.715180 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qklz7"] Oct 03 16:04:36 crc kubenswrapper[5081]: E1003 16:04:36.715914 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="extract-content" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.715929 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="extract-content" Oct 03 16:04:36 crc kubenswrapper[5081]: E1003 16:04:36.715973 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="extract-utilities" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.715983 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="extract-utilities" Oct 03 16:04:36 crc kubenswrapper[5081]: E1003 16:04:36.715996 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="registry-server" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.716001 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="registry-server" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.716131 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ebcc51-55d4-472e-a936-442252ac481d" containerName="registry-server" Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.717070 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.736401 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qklz7"]
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.860484 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.860522 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9jzb\" (UniqueName: \"kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.860606 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.961809 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.962164 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.962277 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9jzb\" (UniqueName: \"kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.962463 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.962631 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:36 crc kubenswrapper[5081]: I1003 16:04:36.999372 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9jzb\" (UniqueName: \"kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb\") pod \"certified-operators-qklz7\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") " pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:37 crc kubenswrapper[5081]: I1003 16:04:37.035900 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:37 crc kubenswrapper[5081]: I1003 16:04:37.569343 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qklz7"]
Oct 03 16:04:38 crc kubenswrapper[5081]: I1003 16:04:38.531709 5081 generic.go:334] "Generic (PLEG): container finished" podID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerID="1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627" exitCode=0
Oct 03 16:04:38 crc kubenswrapper[5081]: I1003 16:04:38.531798 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerDied","Data":"1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627"}
Oct 03 16:04:38 crc kubenswrapper[5081]: I1003 16:04:38.533278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerStarted","Data":"a1a8000a1470d0655a899e15fdbbd28e76e891e9885efca1cfa9330bbbdd4b55"}
Oct 03 16:04:39 crc kubenswrapper[5081]: I1003 16:04:39.543612 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerStarted","Data":"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"}
Oct 03 16:04:40 crc kubenswrapper[5081]: I1003 16:04:40.551717 5081 generic.go:334] "Generic (PLEG): container finished" podID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerID="c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398" exitCode=0
Oct 03 16:04:40 crc kubenswrapper[5081]: I1003 16:04:40.551762 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerDied","Data":"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"}
Oct 03 16:04:42 crc kubenswrapper[5081]: I1003 16:04:42.587154 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerStarted","Data":"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"}
Oct 03 16:04:42 crc kubenswrapper[5081]: I1003 16:04:42.612751 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qklz7" podStartSLOduration=3.614589329 podStartE2EDuration="6.612735308s" podCreationTimestamp="2025-10-03 16:04:36 +0000 UTC" firstStartedPulling="2025-10-03 16:04:38.534895824 +0000 UTC m=+2197.500452437" lastFinishedPulling="2025-10-03 16:04:41.533041793 +0000 UTC m=+2200.498598416" observedRunningTime="2025-10-03 16:04:42.610030631 +0000 UTC m=+2201.575587254" watchObservedRunningTime="2025-10-03 16:04:42.612735308 +0000 UTC m=+2201.578291931"
Oct 03 16:04:47 crc kubenswrapper[5081]: I1003 16:04:47.037069 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:47 crc kubenswrapper[5081]: I1003 16:04:47.037433 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:47 crc kubenswrapper[5081]: I1003 16:04:47.077141 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:47 crc kubenswrapper[5081]: I1003 16:04:47.662984 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:47 crc kubenswrapper[5081]: I1003 16:04:47.706462 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qklz7"]
Oct 03 16:04:49 crc kubenswrapper[5081]: I1003 16:04:49.640779 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qklz7" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="registry-server" containerID="cri-o://015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355" gracePeriod=2
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.024761 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.153064 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content\") pod \"da8dd0ea-5bbd-4592-af05-4df547cd5306\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") "
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.153213 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities\") pod \"da8dd0ea-5bbd-4592-af05-4df547cd5306\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") "
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.153324 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9jzb\" (UniqueName: \"kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb\") pod \"da8dd0ea-5bbd-4592-af05-4df547cd5306\" (UID: \"da8dd0ea-5bbd-4592-af05-4df547cd5306\") "
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.155338 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities" (OuterVolumeSpecName: "utilities") pod "da8dd0ea-5bbd-4592-af05-4df547cd5306" (UID: "da8dd0ea-5bbd-4592-af05-4df547cd5306"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.166953 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb" (OuterVolumeSpecName: "kube-api-access-k9jzb") pod "da8dd0ea-5bbd-4592-af05-4df547cd5306" (UID: "da8dd0ea-5bbd-4592-af05-4df547cd5306"). InnerVolumeSpecName "kube-api-access-k9jzb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.255636 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.255671 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9jzb\" (UniqueName: \"kubernetes.io/projected/da8dd0ea-5bbd-4592-af05-4df547cd5306-kube-api-access-k9jzb\") on node \"crc\" DevicePath \"\""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.649296 5081 generic.go:334] "Generic (PLEG): container finished" podID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerID="015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355" exitCode=0
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.649350 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerDied","Data":"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"}
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.649358 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qklz7"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.649382 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qklz7" event={"ID":"da8dd0ea-5bbd-4592-af05-4df547cd5306","Type":"ContainerDied","Data":"a1a8000a1470d0655a899e15fdbbd28e76e891e9885efca1cfa9330bbbdd4b55"}
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.649403 5081 scope.go:117] "RemoveContainer" containerID="015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.669894 5081 scope.go:117] "RemoveContainer" containerID="c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.685209 5081 scope.go:117] "RemoveContainer" containerID="1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.710937 5081 scope.go:117] "RemoveContainer" containerID="015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"
Oct 03 16:04:50 crc kubenswrapper[5081]: E1003 16:04:50.711379 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355\": container with ID starting with 015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355 not found: ID does not exist" containerID="015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.711419 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355"} err="failed to get container status \"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355\": rpc error: code = NotFound desc = could not find container \"015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355\": container with ID starting with 015d89192d8ca94e314987e12cf57b7975e2637870fb446d246e88d1e7bc3355 not found: ID does not exist"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.711465 5081 scope.go:117] "RemoveContainer" containerID="c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"
Oct 03 16:04:50 crc kubenswrapper[5081]: E1003 16:04:50.711868 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398\": container with ID starting with c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398 not found: ID does not exist" containerID="c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.711899 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398"} err="failed to get container status \"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398\": rpc error: code = NotFound desc = could not find container \"c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398\": container with ID starting with c6c0d1cb262c983ff99695d10ba53137718e5e1d26921df04356ed8acc454398 not found: ID does not exist"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.711917 5081 scope.go:117] "RemoveContainer" containerID="1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627"
Oct 03 16:04:50 crc kubenswrapper[5081]: E1003 16:04:50.712194 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627\": container with ID starting with 1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627 not found: ID does not exist" containerID="1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.712223 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627"} err="failed to get container status \"1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627\": rpc error: code = NotFound desc = could not find container \"1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627\": container with ID starting with 1129345c64e0be72695b1a39bb1ad046432f72a175f787835bc4f77c3791a627 not found: ID does not exist"
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.921826 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da8dd0ea-5bbd-4592-af05-4df547cd5306" (UID: "da8dd0ea-5bbd-4592-af05-4df547cd5306"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.972804 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da8dd0ea-5bbd-4592-af05-4df547cd5306-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 16:04:50 crc kubenswrapper[5081]: I1003 16:04:50.996721 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qklz7"]
Oct 03 16:04:51 crc kubenswrapper[5081]: I1003 16:04:51.005521 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qklz7"]
Oct 03 16:04:51 crc kubenswrapper[5081]: I1003 16:04:51.838538 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" path="/var/lib/kubelet/pods/da8dd0ea-5bbd-4592-af05-4df547cd5306/volumes"
Oct 03 16:05:00 crc kubenswrapper[5081]: I1003 16:05:00.647311 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 16:05:00 crc kubenswrapper[5081]: I1003 16:05:00.647941 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.647818 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.648376 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.648426 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.649069 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.649118 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" gracePeriod=600
Oct 03 16:05:30 crc kubenswrapper[5081]: E1003 16:05:30.768289 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.917725 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" exitCode=0 Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.917799 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a"} Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.917858 5081 scope.go:117] "RemoveContainer" containerID="1352559dc2af2add94b9f0673c3cb44e4f2124cd70cd7796aa9933150ff2e95c" Oct 03 16:05:30 crc kubenswrapper[5081]: I1003 16:05:30.918385 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:05:30 crc kubenswrapper[5081]: E1003 16:05:30.918769 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:05:41 crc kubenswrapper[5081]: I1003 16:05:41.831693 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:05:41 crc kubenswrapper[5081]: E1003 16:05:41.832417 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:05:56 crc kubenswrapper[5081]: I1003 16:05:56.828761 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:05:56 crc kubenswrapper[5081]: E1003 16:05:56.830018 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:06:09 crc kubenswrapper[5081]: I1003 16:06:09.827712 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:06:09 crc kubenswrapper[5081]: E1003 16:06:09.828432 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:06:20 crc kubenswrapper[5081]: I1003 16:06:20.827770 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:06:20 crc kubenswrapper[5081]: E1003 16:06:20.828478 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:06:35 crc kubenswrapper[5081]: I1003 16:06:35.828033 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:06:35 crc kubenswrapper[5081]: E1003 16:06:35.828847 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:06:48 crc kubenswrapper[5081]: I1003 16:06:48.828379 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:06:48 crc kubenswrapper[5081]: E1003 16:06:48.829152 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:07:00 crc kubenswrapper[5081]: I1003 16:07:00.827671 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:07:00 crc kubenswrapper[5081]: E1003 16:07:00.828696 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:07:12 crc kubenswrapper[5081]: I1003 16:07:12.827961 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:07:12 crc kubenswrapper[5081]: E1003 16:07:12.829202 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:07:27 crc kubenswrapper[5081]: I1003 16:07:27.828036 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:07:27 crc kubenswrapper[5081]: E1003 16:07:27.828928 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:07:40 crc kubenswrapper[5081]: I1003 16:07:40.827747 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:07:40 crc kubenswrapper[5081]: E1003 16:07:40.828499 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:07:55 crc kubenswrapper[5081]: I1003 16:07:55.828128 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:07:55 crc kubenswrapper[5081]: E1003 16:07:55.829026 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:08:08 crc kubenswrapper[5081]: I1003 16:08:08.827937 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:08:08 crc kubenswrapper[5081]: E1003 16:08:08.828849 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:08:21 crc kubenswrapper[5081]: I1003 16:08:21.832676 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:08:21 crc kubenswrapper[5081]: E1003 16:08:21.833991 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:08:33 crc kubenswrapper[5081]: I1003 16:08:33.827464 5081 
scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:08:33 crc kubenswrapper[5081]: E1003 16:08:33.828120 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:08:48 crc kubenswrapper[5081]: I1003 16:08:48.827144 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:08:48 crc kubenswrapper[5081]: E1003 16:08:48.827804 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:09:01 crc kubenswrapper[5081]: I1003 16:09:01.833878 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:09:01 crc kubenswrapper[5081]: E1003 16:09:01.834941 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:09:12 crc kubenswrapper[5081]: I1003 16:09:12.827506 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:09:12 crc kubenswrapper[5081]: E1003 16:09:12.828250 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:09:23 crc kubenswrapper[5081]: I1003 16:09:23.827799 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:09:23 crc kubenswrapper[5081]: E1003 16:09:23.828373 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:09:35 crc kubenswrapper[5081]: I1003 16:09:35.827916 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:09:35 crc kubenswrapper[5081]: E1003 16:09:35.829218 5081 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:09:50 crc kubenswrapper[5081]: I1003 16:09:50.828167 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:09:50 crc kubenswrapper[5081]: E1003 16:09:50.829391 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:10:01 crc kubenswrapper[5081]: I1003 16:10:01.839932 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:10:01 crc kubenswrapper[5081]: E1003 16:10:01.840773 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:10:14 crc kubenswrapper[5081]: I1003 16:10:14.829124 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:10:14 crc kubenswrapper[5081]: E1003 16:10:14.831140 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:10:29 crc kubenswrapper[5081]: I1003 16:10:29.828356 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:10:29 crc kubenswrapper[5081]: E1003 16:10:29.829362 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:10:43 crc kubenswrapper[5081]: I1003 16:10:43.828548 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.536743 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30"} Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.776186 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:44 crc kubenswrapper[5081]: E1003 16:10:44.776524 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="extract-utilities" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.776537 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="extract-utilities" Oct 03 16:10:44 crc kubenswrapper[5081]: E1003 16:10:44.776576 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="registry-server" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.776583 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="registry-server" Oct 03 16:10:44 crc kubenswrapper[5081]: E1003 16:10:44.776602 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="extract-content" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.776609 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="extract-content" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.776735 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="da8dd0ea-5bbd-4592-af05-4df547cd5306" containerName="registry-server" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.781400 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.804207 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.911296 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.911599 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db2fz\" (UniqueName: \"kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:44 crc kubenswrapper[5081]: I1003 16:10:44.912129 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.013961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db2fz\" (UniqueName: \"kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.014141 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.014273 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.014824 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.014936 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.042329 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-db2fz\" (UniqueName: \"kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz\") pod \"redhat-marketplace-2gk5w\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.102956 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:45 crc kubenswrapper[5081]: I1003 16:10:45.592632 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:46 crc kubenswrapper[5081]: I1003 16:10:46.553659 5081 generic.go:334] "Generic (PLEG): container finished" podID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerID="cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b" exitCode=0 Oct 03 16:10:46 crc kubenswrapper[5081]: I1003 16:10:46.553817 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerDied","Data":"cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b"} Oct 03 16:10:46 crc kubenswrapper[5081]: I1003 16:10:46.554210 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerStarted","Data":"1dcc51c72d2c3b6e5abf8552668d67c97ce7d04d9fd8672b88bfc9e89f29bf67"} Oct 03 16:10:46 crc kubenswrapper[5081]: I1003 16:10:46.556340 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:10:48 crc kubenswrapper[5081]: I1003 16:10:48.580492 5081 generic.go:334] "Generic (PLEG): container finished" podID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerID="2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031" exitCode=0 Oct 03 16:10:48 crc kubenswrapper[5081]: I1003 16:10:48.580554 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerDied","Data":"2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031"} Oct 03 16:10:50 crc kubenswrapper[5081]: I1003 16:10:50.603227 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerStarted","Data":"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa"} Oct 03 16:10:50 crc kubenswrapper[5081]: I1003 16:10:50.634215 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2gk5w" podStartSLOduration=3.746790093 podStartE2EDuration="6.634184711s" podCreationTimestamp="2025-10-03 16:10:44 +0000 UTC" firstStartedPulling="2025-10-03 16:10:46.555994838 +0000 UTC m=+2565.521551451" lastFinishedPulling="2025-10-03 16:10:49.443389446 +0000 UTC m=+2568.408946069" observedRunningTime="2025-10-03 16:10:50.621987494 +0000 UTC m=+2569.587544117" watchObservedRunningTime="2025-10-03 16:10:50.634184711 +0000 UTC m=+2569.599741334" Oct 03 16:10:55 crc kubenswrapper[5081]: I1003 16:10:55.104069 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:55 crc kubenswrapper[5081]: I1003 16:10:55.104973 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:55 crc kubenswrapper[5081]: I1003 16:10:55.179754 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:55 crc kubenswrapper[5081]: I1003 16:10:55.719866 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:55 crc kubenswrapper[5081]: I1003 16:10:55.800724 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:57 crc kubenswrapper[5081]: I1003 16:10:57.671666 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2gk5w" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="registry-server" containerID="cri-o://d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa" gracePeriod=2 Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.203684 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.368546 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content\") pod \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.368706 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities\") pod \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.368825 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-db2fz\" (UniqueName: \"kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz\") pod \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\" (UID: \"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef\") " Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.369810 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities" (OuterVolumeSpecName: "utilities") pod "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" (UID: "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.380609 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz" (OuterVolumeSpecName: "kube-api-access-db2fz") pod "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" (UID: "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef"). InnerVolumeSpecName "kube-api-access-db2fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.384426 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" (UID: "dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.470429 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.470491 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.470503 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-db2fz\" (UniqueName: \"kubernetes.io/projected/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef-kube-api-access-db2fz\") on node \"crc\" DevicePath \"\"" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.688340 5081 generic.go:334] "Generic (PLEG): container finished" podID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerID="d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa" exitCode=0 Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.688402 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerDied","Data":"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa"} Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.688442 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2gk5w" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.688471 5081 scope.go:117] "RemoveContainer" containerID="d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.688447 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2gk5w" event={"ID":"dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef","Type":"ContainerDied","Data":"1dcc51c72d2c3b6e5abf8552668d67c97ce7d04d9fd8672b88bfc9e89f29bf67"} Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.730833 5081 scope.go:117] "RemoveContainer" containerID="2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.758937 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.770324 5081 scope.go:117] "RemoveContainer" containerID="cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.778226 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2gk5w"] Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.811807 5081 scope.go:117] "RemoveContainer" containerID="d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa" Oct 03 16:10:58 crc kubenswrapper[5081]: E1003 16:10:58.812998 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa\": container with ID starting with d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa not found: ID does not exist" containerID="d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.813055 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa"} err="failed to get container status \"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa\": rpc error: code = NotFound desc = could not find container \"d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa\": container with ID starting with d291e09888f6438519da963bdfb3316961fa113b1ce0b3efe185c50bf0bc6baa not found: ID does not exist" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.813102 5081 scope.go:117] "RemoveContainer" containerID="2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031" Oct 03 16:10:58 crc kubenswrapper[5081]: E1003 16:10:58.813629 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031\": container with ID starting with 2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031 not found: ID does not exist" containerID="2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.813677 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031"} err="failed to get container status \"2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031\": rpc error: code = NotFound desc = could not find container \"2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031\": container with ID starting with 2277c357c16c4ae001622e937f8c76eb4520b1e722c0bcd0057ef1a4a14df031 not found: ID does not exist" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.813695 5081 scope.go:117] "RemoveContainer" containerID="cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b" Oct 03 16:10:58 crc kubenswrapper[5081]: E1003 16:10:58.814757 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b\": container with ID starting with cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b not found: ID does not exist" containerID="cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b" Oct 03 16:10:58 crc kubenswrapper[5081]: I1003 16:10:58.814973 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b"} err="failed to get container status \"cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b\": rpc error: code = NotFound desc = could not find container \"cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b\": container with ID starting with cb889071854aaaa869586aadf0ea306f409a661536f0bf8294f04ca1727a913b not found: ID does not exist" Oct 03 16:10:59 crc kubenswrapper[5081]: I1003 16:10:59.846343 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" path="/var/lib/kubelet/pods/dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef/volumes" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.424993 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:04 crc kubenswrapper[5081]: E1003 16:12:04.426109 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="registry-server" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.426124 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="registry-server" Oct 03 16:12:04 crc kubenswrapper[5081]: E1003 16:12:04.426140 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="extract-content" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.426146 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="extract-content" Oct 03 16:12:04 crc kubenswrapper[5081]: E1003 16:12:04.426178 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="extract-utilities" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.426187 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="extract-utilities" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.426347 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbe53cbe-9b9c-49c7-b0b9-c26cab1224ef" containerName="registry-server" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.427493 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.442223 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.627863 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrmsj\" (UniqueName: \"kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.627939 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.628631 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.730322 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.730466 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrmsj\" (UniqueName: \"kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj\") pod \"redhat-operators-c586p\" (UID: 
\"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.730524 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.730971 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.731375 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:04 crc kubenswrapper[5081]: I1003 16:12:04.778294 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrmsj\" (UniqueName: \"kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj\") pod \"redhat-operators-c586p\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:05 crc kubenswrapper[5081]: I1003 16:12:05.077574 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:05 crc kubenswrapper[5081]: I1003 16:12:05.353169 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:05 crc kubenswrapper[5081]: I1003 16:12:05.530484 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerStarted","Data":"de61f8aae87339219a5c4ca578490a3c1a04b9528c145980ccdb789d858a74ba"} Oct 03 16:12:06 crc kubenswrapper[5081]: I1003 16:12:06.545826 5081 generic.go:334] "Generic (PLEG): container finished" podID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerID="150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035" exitCode=0 Oct 03 16:12:06 crc kubenswrapper[5081]: I1003 16:12:06.545903 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerDied","Data":"150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035"} Oct 03 16:12:07 crc kubenswrapper[5081]: I1003 16:12:07.558586 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerStarted","Data":"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c"} Oct 03 16:12:08 crc kubenswrapper[5081]: I1003 16:12:08.569049 5081 generic.go:334] "Generic (PLEG): container finished" podID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerID="a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c" exitCode=0 Oct 03 16:12:08 crc kubenswrapper[5081]: I1003 16:12:08.569113 5081 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerDied","Data":"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c"} Oct 03 16:12:09 crc kubenswrapper[5081]: I1003 16:12:09.579998 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerStarted","Data":"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d"} Oct 03 16:12:09 crc kubenswrapper[5081]: I1003 16:12:09.603065 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c586p" podStartSLOduration=3.077673582 podStartE2EDuration="5.603046398s" podCreationTimestamp="2025-10-03 16:12:04 +0000 UTC" firstStartedPulling="2025-10-03 16:12:06.549366385 +0000 UTC m=+2645.514923038" lastFinishedPulling="2025-10-03 16:12:09.074739211 +0000 UTC m=+2648.040295854" observedRunningTime="2025-10-03 16:12:09.601021691 +0000 UTC m=+2648.566578314" watchObservedRunningTime="2025-10-03 16:12:09.603046398 +0000 UTC m=+2648.568603011" Oct 03 16:12:15 crc kubenswrapper[5081]: I1003 16:12:15.078207 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:15 crc kubenswrapper[5081]: I1003 16:12:15.079139 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:15 crc kubenswrapper[5081]: I1003 16:12:15.138275 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:15 crc kubenswrapper[5081]: I1003 16:12:15.682943 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:15 crc kubenswrapper[5081]: I1003 16:12:15.748072 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:17 crc kubenswrapper[5081]: I1003 16:12:17.658108 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c586p" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="registry-server" containerID="cri-o://465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d" gracePeriod=2 Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.298361 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.321276 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities\") pod \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.321456 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrmsj\" (UniqueName: \"kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj\") pod \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.321502 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content\") pod \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\" (UID: \"4f2e4b3d-945b-4a41-94a2-2f9143d490f0\") " Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.326869 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities" (OuterVolumeSpecName: "utilities") pod "4f2e4b3d-945b-4a41-94a2-2f9143d490f0" (UID: "4f2e4b3d-945b-4a41-94a2-2f9143d490f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.332448 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj" (OuterVolumeSpecName: "kube-api-access-hrmsj") pod "4f2e4b3d-945b-4a41-94a2-2f9143d490f0" (UID: "4f2e4b3d-945b-4a41-94a2-2f9143d490f0"). InnerVolumeSpecName "kube-api-access-hrmsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.423428 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.423461 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrmsj\" (UniqueName: \"kubernetes.io/projected/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-kube-api-access-hrmsj\") on node \"crc\" DevicePath \"\"" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.447143 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f2e4b3d-945b-4a41-94a2-2f9143d490f0" (UID: "4f2e4b3d-945b-4a41-94a2-2f9143d490f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.525130 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f2e4b3d-945b-4a41-94a2-2f9143d490f0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.681836 5081 generic.go:334] "Generic (PLEG): container finished" podID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerID="465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d" exitCode=0 Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.681896 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c586p" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.681919 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerDied","Data":"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d"} Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.682077 5081 scope.go:117] "RemoveContainer" containerID="465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.681975 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c586p" event={"ID":"4f2e4b3d-945b-4a41-94a2-2f9143d490f0","Type":"ContainerDied","Data":"de61f8aae87339219a5c4ca578490a3c1a04b9528c145980ccdb789d858a74ba"} Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.723950 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.729975 5081 scope.go:117] "RemoveContainer" containerID="a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.736884 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c586p"] Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.762125 5081 scope.go:117] "RemoveContainer" containerID="150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.786979 5081 scope.go:117] "RemoveContainer" containerID="465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d" Oct 03 16:12:19 crc kubenswrapper[5081]: E1003 16:12:19.787629 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d\": container with ID starting with 465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d not found: ID does not exist" containerID="465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.787696 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d"} err="failed to get container status \"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d\": rpc error: code = NotFound desc = could not find container \"465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d\": container with ID starting with 465b00f58c477c839dc88b82dae20902152c5054b15b54915866faf188a1af1d not found: ID does not exist" Oct 03 16:12:19 crc 
kubenswrapper[5081]: I1003 16:12:19.787738 5081 scope.go:117] "RemoveContainer" containerID="a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c" Oct 03 16:12:19 crc kubenswrapper[5081]: E1003 16:12:19.790931 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c\": container with ID starting with a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c not found: ID does not exist" containerID="a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.790973 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c"} err="failed to get container status \"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c\": rpc error: code = NotFound desc = could not find container \"a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c\": container with ID starting with a5277a7cbede94665d95232ab282fe9ad3e8382c7677b03cb3bdc82091c97f5c not found: ID does not exist" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.791001 5081 scope.go:117] "RemoveContainer" containerID="150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035" Oct 03 16:12:19 crc kubenswrapper[5081]: E1003 16:12:19.791455 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035\": container with ID starting with 150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035 not found: ID does not exist" containerID="150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.791494 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035"} err="failed to get container status \"150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035\": rpc error: code = NotFound desc = could not find container \"150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035\": container with ID starting with 150f61db0662b907420e05fa3fcf8769078b9daeee502395194f33dfd849b035 not found: ID does not exist" Oct 03 16:12:19 crc kubenswrapper[5081]: I1003 16:12:19.838413 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" path="/var/lib/kubelet/pods/4f2e4b3d-945b-4a41-94a2-2f9143d490f0/volumes" Oct 03 16:13:00 crc kubenswrapper[5081]: I1003 16:13:00.647943 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:13:00 crc kubenswrapper[5081]: I1003 16:13:00.648846 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:13:30 crc kubenswrapper[5081]: I1003 16:13:30.647912 5081 patch_prober.go:28] interesting 
pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:13:30 crc kubenswrapper[5081]: I1003 16:13:30.648741 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:14:00 crc kubenswrapper[5081]: I1003 16:14:00.647654 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:14:00 crc kubenswrapper[5081]: I1003 16:14:00.648456 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:14:00 crc kubenswrapper[5081]: I1003 16:14:00.648528 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:14:00 crc kubenswrapper[5081]: I1003 16:14:00.649345 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:14:00 crc kubenswrapper[5081]: I1003 16:14:00.649422 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30" gracePeriod=600 Oct 03 16:14:01 crc kubenswrapper[5081]: I1003 16:14:01.611661 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30" exitCode=0 Oct 03 16:14:01 crc kubenswrapper[5081]: I1003 16:14:01.611733 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30"} Oct 03 16:14:01 crc kubenswrapper[5081]: I1003 16:14:01.612615 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030"} Oct 03 16:14:01 crc kubenswrapper[5081]: I1003 16:14:01.612642 5081 scope.go:117] "RemoveContainer" containerID="f17a94782012d42c327d29884012622ef080a4647614e25b42475e483427525a" Oct 03 16:14:33 
crc kubenswrapper[5081]: I1003 16:14:33.645427 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:33 crc kubenswrapper[5081]: E1003 16:14:33.646681 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="registry-server" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.646700 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="registry-server" Oct 03 16:14:33 crc kubenswrapper[5081]: E1003 16:14:33.646728 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="extract-utilities" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.646737 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="extract-utilities" Oct 03 16:14:33 crc kubenswrapper[5081]: E1003 16:14:33.646755 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="extract-content" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.646764 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="extract-content" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.647020 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f2e4b3d-945b-4a41-94a2-2f9143d490f0" containerName="registry-server" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.648419 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.664001 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.779373 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.779431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.779789 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.881409 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " 
pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.881466 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.881539 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.882106 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.882517 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.911172 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf\") pod \"community-operators-h4fws\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:33 crc kubenswrapper[5081]: I1003 16:14:33.983018 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:34 crc kubenswrapper[5081]: I1003 16:14:34.562480 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:34 crc kubenswrapper[5081]: I1003 16:14:34.977053 5081 generic.go:334] "Generic (PLEG): container finished" podID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerID="0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579" exitCode=0 Oct 03 16:14:34 crc kubenswrapper[5081]: I1003 16:14:34.977124 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerDied","Data":"0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579"} Oct 03 16:14:34 crc kubenswrapper[5081]: I1003 16:14:34.977153 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerStarted","Data":"3150d2e435cc08165516e9d694910fed0ec5080423cd548d79e3e0ede5066c2e"} Oct 03 16:14:37 crc kubenswrapper[5081]: I1003 16:14:37.001525 5081 generic.go:334] "Generic (PLEG): container finished" podID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerID="c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b" exitCode=0 Oct 03 16:14:37 crc kubenswrapper[5081]: I1003 16:14:37.001598 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerDied","Data":"c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b"} Oct 03 16:14:38 crc kubenswrapper[5081]: I1003 16:14:38.014651 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerStarted","Data":"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42"} Oct 03 16:14:38 crc kubenswrapper[5081]: I1003 16:14:38.041219 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h4fws" podStartSLOduration=2.510916038 podStartE2EDuration="5.041200997s" podCreationTimestamp="2025-10-03 16:14:33 +0000 UTC" firstStartedPulling="2025-10-03 16:14:34.978534984 +0000 UTC m=+2793.944091597" lastFinishedPulling="2025-10-03 16:14:37.508819913 +0000 UTC m=+2796.474376556" observedRunningTime="2025-10-03 16:14:38.034383653 +0000 UTC m=+2796.999940306" watchObservedRunningTime="2025-10-03 16:14:38.041200997 +0000 UTC m=+2797.006757610" Oct 03 16:14:43 crc kubenswrapper[5081]: I1003 16:14:43.983688 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:43 crc kubenswrapper[5081]: I1003 16:14:43.984767 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:44 crc kubenswrapper[5081]: I1003 16:14:44.047782 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:44 crc kubenswrapper[5081]: I1003 16:14:44.127410 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:44 crc kubenswrapper[5081]: I1003 16:14:44.292309 5081 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.086914 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h4fws" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="registry-server" containerID="cri-o://3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42" gracePeriod=2 Oct 03 16:14:46 crc kubenswrapper[5081]: E1003 16:14:46.122159 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2e9e191_ac01_4fac_8006_ea4bc3d9d26c.slice/crio-conmon-3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2e9e191_ac01_4fac_8006_ea4bc3d9d26c.slice/crio-3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42.scope\": RecentStats: unable to find data in memory cache]" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.497309 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.513669 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities\") pod \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.513851 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content\") pod \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.513933 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf\") pod \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\" (UID: \"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c\") " Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.515974 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities" (OuterVolumeSpecName: "utilities") pod "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" (UID: "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.522355 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf" (OuterVolumeSpecName: "kube-api-access-z4gvf") pod "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" (UID: "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c"). InnerVolumeSpecName "kube-api-access-z4gvf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.581158 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" (UID: "e2e9e191-ac01-4fac-8006-ea4bc3d9d26c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.615550 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.615620 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.615638 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4gvf\" (UniqueName: \"kubernetes.io/projected/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c-kube-api-access-z4gvf\") on node \"crc\" DevicePath \"\"" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.715730 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:14:46 crc kubenswrapper[5081]: E1003 16:14:46.716260 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="extract-utilities" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.716281 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="extract-utilities" Oct 03 16:14:46 crc kubenswrapper[5081]: E1003 16:14:46.716317 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="registry-server" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.716326 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="registry-server" Oct 03 16:14:46 crc kubenswrapper[5081]: E1003 16:14:46.716344 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="extract-content" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.716351 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="extract-content" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.716524 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerName="registry-server" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.718145 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.726824 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.818597 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.818702 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbxmd\" (UniqueName: \"kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.818738 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.920295 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbxmd\" (UniqueName: \"kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.920355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.920437 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.921091 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.921864 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:46 crc kubenswrapper[5081]: I1003 16:14:46.944986 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jbxmd\" (UniqueName: \"kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd\") pod \"certified-operators-ghkdt\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.055814 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.106025 5081 generic.go:334] "Generic (PLEG): container finished" podID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" containerID="3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42" exitCode=0 Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.106381 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h4fws" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.106417 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerDied","Data":"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42"} Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.110680 5081 scope.go:117] "RemoveContainer" containerID="3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.110544 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h4fws" event={"ID":"e2e9e191-ac01-4fac-8006-ea4bc3d9d26c","Type":"ContainerDied","Data":"3150d2e435cc08165516e9d694910fed0ec5080423cd548d79e3e0ede5066c2e"} Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.157666 5081 scope.go:117] "RemoveContainer" containerID="c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.157738 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.162892 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h4fws"] Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.213967 5081 scope.go:117] "RemoveContainer" containerID="0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.250677 5081 scope.go:117] "RemoveContainer" containerID="3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42" Oct 03 16:14:47 crc kubenswrapper[5081]: E1003 16:14:47.251285 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42\": container with ID starting with 3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42 not found: ID does not exist" containerID="3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.251339 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42"} err="failed to get container status \"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42\": rpc error: code = NotFound desc = could not find container 
\"3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42\": container with ID starting with 3821021c6d4b683a03808fb82d73af1b81f45a265dc02847c42f3b76ae57ed42 not found: ID does not exist" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.251373 5081 scope.go:117] "RemoveContainer" containerID="c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b" Oct 03 16:14:47 crc kubenswrapper[5081]: E1003 16:14:47.251794 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b\": container with ID starting with c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b not found: ID does not exist" containerID="c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.251846 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b"} err="failed to get container status \"c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b\": rpc error: code = NotFound desc = could not find container \"c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b\": container with ID starting with c56d99d385b1a129347f5937f75a2c159ebf5978f148c0a4dade7e2457918c9b not found: ID does not exist" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.251884 5081 scope.go:117] "RemoveContainer" containerID="0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579" Oct 03 16:14:47 crc kubenswrapper[5081]: E1003 16:14:47.252202 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579\": container with ID starting with 0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579 not found: ID does not exist" containerID="0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.252249 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579"} err="failed to get container status \"0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579\": rpc error: code = NotFound desc = could not find container \"0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579\": container with ID starting with 0d4f53f0b38c0ccd9534f89a87ede0e0388fa2a69c78774b143b5cd7d5937579 not found: ID does not exist" Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.580867 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:14:47 crc kubenswrapper[5081]: I1003 16:14:47.841036 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e9e191-ac01-4fac-8006-ea4bc3d9d26c" path="/var/lib/kubelet/pods/e2e9e191-ac01-4fac-8006-ea4bc3d9d26c/volumes" Oct 03 16:14:48 crc kubenswrapper[5081]: I1003 16:14:48.117762 5081 generic.go:334] "Generic (PLEG): container finished" podID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerID="a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197" exitCode=0 Oct 03 16:14:48 crc kubenswrapper[5081]: I1003 16:14:48.117885 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" 
event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerDied","Data":"a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197"} Oct 03 16:14:48 crc kubenswrapper[5081]: I1003 16:14:48.119833 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerStarted","Data":"77acacab0b5a102829563008d8b69be9215ec0cbca69dc488fbd8ef631736087"} Oct 03 16:14:49 crc kubenswrapper[5081]: I1003 16:14:49.130729 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerStarted","Data":"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed"} Oct 03 16:14:50 crc kubenswrapper[5081]: I1003 16:14:50.145274 5081 generic.go:334] "Generic (PLEG): container finished" podID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerID="068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed" exitCode=0 Oct 03 16:14:50 crc kubenswrapper[5081]: I1003 16:14:50.145340 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerDied","Data":"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed"} Oct 03 16:14:51 crc kubenswrapper[5081]: I1003 16:14:51.158384 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerStarted","Data":"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690"} Oct 03 16:14:51 crc kubenswrapper[5081]: I1003 16:14:51.183625 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ghkdt" podStartSLOduration=2.470066864 podStartE2EDuration="5.183598346s" podCreationTimestamp="2025-10-03 16:14:46 +0000 UTC" firstStartedPulling="2025-10-03 16:14:48.119362548 +0000 UTC m=+2807.084919161" lastFinishedPulling="2025-10-03 16:14:50.832894 +0000 UTC m=+2809.798450643" observedRunningTime="2025-10-03 16:14:51.181401624 +0000 UTC m=+2810.146958267" watchObservedRunningTime="2025-10-03 16:14:51.183598346 +0000 UTC m=+2810.149154969" Oct 03 16:14:57 crc kubenswrapper[5081]: I1003 16:14:57.056344 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:57 crc kubenswrapper[5081]: I1003 16:14:57.057214 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:57 crc kubenswrapper[5081]: I1003 16:14:57.116683 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:57 crc kubenswrapper[5081]: I1003 16:14:57.273094 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:57 crc kubenswrapper[5081]: I1003 16:14:57.363088 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.241495 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ghkdt" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="registry-server" 
containerID="cri-o://2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690" gracePeriod=2 Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.614177 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.745743 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbxmd\" (UniqueName: \"kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd\") pod \"64d59129-a170-42a4-a3c8-e8aed15e1988\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.745814 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content\") pod \"64d59129-a170-42a4-a3c8-e8aed15e1988\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.745982 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities\") pod \"64d59129-a170-42a4-a3c8-e8aed15e1988\" (UID: \"64d59129-a170-42a4-a3c8-e8aed15e1988\") " Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.747803 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities" (OuterVolumeSpecName: "utilities") pod "64d59129-a170-42a4-a3c8-e8aed15e1988" (UID: "64d59129-a170-42a4-a3c8-e8aed15e1988"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.752770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd" (OuterVolumeSpecName: "kube-api-access-jbxmd") pod "64d59129-a170-42a4-a3c8-e8aed15e1988" (UID: "64d59129-a170-42a4-a3c8-e8aed15e1988"). InnerVolumeSpecName "kube-api-access-jbxmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.848196 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbxmd\" (UniqueName: \"kubernetes.io/projected/64d59129-a170-42a4-a3c8-e8aed15e1988-kube-api-access-jbxmd\") on node \"crc\" DevicePath \"\"" Oct 03 16:14:59 crc kubenswrapper[5081]: I1003 16:14:59.848644 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.031176 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64d59129-a170-42a4-a3c8-e8aed15e1988" (UID: "64d59129-a170-42a4-a3c8-e8aed15e1988"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.051307 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64d59129-a170-42a4-a3c8-e8aed15e1988-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.167177 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx"] Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.167656 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="extract-utilities" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.167682 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="extract-utilities" Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.167712 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="registry-server" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.167722 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="registry-server" Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.167758 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="extract-content" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.167768 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="extract-content" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.168015 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerName="registry-server" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.169009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.171977 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.180043 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx"] Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.180871 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.252760 5081 generic.go:334] "Generic (PLEG): container finished" podID="64d59129-a170-42a4-a3c8-e8aed15e1988" containerID="2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690" exitCode=0 Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.252832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerDied","Data":"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690"} Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.252858 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ghkdt" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.252905 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghkdt" event={"ID":"64d59129-a170-42a4-a3c8-e8aed15e1988","Type":"ContainerDied","Data":"77acacab0b5a102829563008d8b69be9215ec0cbca69dc488fbd8ef631736087"} Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.252939 5081 scope.go:117] "RemoveContainer" containerID="2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.274694 5081 scope.go:117] "RemoveContainer" containerID="068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.288845 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.305976 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ghkdt"] Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.316314 5081 scope.go:117] "RemoveContainer" containerID="a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.338077 5081 scope.go:117] "RemoveContainer" containerID="2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690" Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.338689 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690\": container with ID starting with 2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690 not found: ID does not exist" containerID="2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.338766 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690"} err="failed to get container status \"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690\": rpc error: code = NotFound desc = could not find container \"2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690\": container with ID starting with 2ae03f63f74e1542cf453e23e66b38bfa39e901c8abb52a9c4cfdac66920c690 not found: ID does not exist" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.338811 5081 scope.go:117] "RemoveContainer" containerID="068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed" Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.339314 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed\": container with ID starting with 068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed not found: ID does not exist" containerID="068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.339342 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed"} err="failed to get container status \"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed\": rpc error: code = NotFound desc = could not find 
container \"068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed\": container with ID starting with 068a482a9f052d2d8a283b86128581654d28b17ee8265768f59f42c8c64f72ed not found: ID does not exist" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.339357 5081 scope.go:117] "RemoveContainer" containerID="a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197" Oct 03 16:15:00 crc kubenswrapper[5081]: E1003 16:15:00.339717 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197\": container with ID starting with a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197 not found: ID does not exist" containerID="a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.339741 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197"} err="failed to get container status \"a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197\": rpc error: code = NotFound desc = could not find container \"a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197\": container with ID starting with a743b57194fcd8805356d85e6812d2d24f065d5b0901debde22667eb541aa197 not found: ID does not exist" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.355305 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.355940 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjxdr\" (UniqueName: \"kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.356058 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.457291 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjxdr\" (UniqueName: \"kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.457355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.457418 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.458633 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.462407 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.478260 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjxdr\" (UniqueName: \"kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr\") pod \"collect-profiles-29325135-xdsgx\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.504689 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:00 crc kubenswrapper[5081]: I1003 16:15:00.755674 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx"] Oct 03 16:15:01 crc kubenswrapper[5081]: I1003 16:15:01.266161 5081 generic.go:334] "Generic (PLEG): container finished" podID="c534ff6f-e775-430d-a9e8-9696361bb3f9" containerID="236b4a712df095b08f2943b8a9693cb096accd6cfe4e3f84820245bd639126fa" exitCode=0 Oct 03 16:15:01 crc kubenswrapper[5081]: I1003 16:15:01.266234 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" event={"ID":"c534ff6f-e775-430d-a9e8-9696361bb3f9","Type":"ContainerDied","Data":"236b4a712df095b08f2943b8a9693cb096accd6cfe4e3f84820245bd639126fa"} Oct 03 16:15:01 crc kubenswrapper[5081]: I1003 16:15:01.266315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" event={"ID":"c534ff6f-e775-430d-a9e8-9696361bb3f9","Type":"ContainerStarted","Data":"8e3642a1003375f3d68fd95d31a39efc9997bab54a92733641efdfec2c0b19b5"} Oct 03 16:15:01 crc kubenswrapper[5081]: I1003 16:15:01.841929 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64d59129-a170-42a4-a3c8-e8aed15e1988" path="/var/lib/kubelet/pods/64d59129-a170-42a4-a3c8-e8aed15e1988/volumes" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.539986 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.692173 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume\") pod \"c534ff6f-e775-430d-a9e8-9696361bb3f9\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.692713 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume\") pod \"c534ff6f-e775-430d-a9e8-9696361bb3f9\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.692773 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjxdr\" (UniqueName: \"kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr\") pod \"c534ff6f-e775-430d-a9e8-9696361bb3f9\" (UID: \"c534ff6f-e775-430d-a9e8-9696361bb3f9\") " Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.692912 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume" (OuterVolumeSpecName: "config-volume") pod "c534ff6f-e775-430d-a9e8-9696361bb3f9" (UID: "c534ff6f-e775-430d-a9e8-9696361bb3f9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.693864 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c534ff6f-e775-430d-a9e8-9696361bb3f9-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.700730 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c534ff6f-e775-430d-a9e8-9696361bb3f9" (UID: "c534ff6f-e775-430d-a9e8-9696361bb3f9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.700778 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr" (OuterVolumeSpecName: "kube-api-access-cjxdr") pod "c534ff6f-e775-430d-a9e8-9696361bb3f9" (UID: "c534ff6f-e775-430d-a9e8-9696361bb3f9"). InnerVolumeSpecName "kube-api-access-cjxdr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.795658 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c534ff6f-e775-430d-a9e8-9696361bb3f9-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:15:02 crc kubenswrapper[5081]: I1003 16:15:02.795690 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjxdr\" (UniqueName: \"kubernetes.io/projected/c534ff6f-e775-430d-a9e8-9696361bb3f9-kube-api-access-cjxdr\") on node \"crc\" DevicePath \"\"" Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.283552 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" event={"ID":"c534ff6f-e775-430d-a9e8-9696361bb3f9","Type":"ContainerDied","Data":"8e3642a1003375f3d68fd95d31a39efc9997bab54a92733641efdfec2c0b19b5"} Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.283638 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e3642a1003375f3d68fd95d31a39efc9997bab54a92733641efdfec2c0b19b5" Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.283645 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx" Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.622926 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r"] Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.630357 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325090-jzr7r"] Oct 03 16:15:03 crc kubenswrapper[5081]: I1003 16:15:03.840689 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24a2b864-5a51-4283-8d44-578a6d40a6ce" path="/var/lib/kubelet/pods/24a2b864-5a51-4283-8d44-578a6d40a6ce/volumes" Oct 03 16:15:21 crc kubenswrapper[5081]: I1003 16:15:21.414213 5081 scope.go:117] "RemoveContainer" containerID="d8cc2441878ec63c9755189a7a4fc6e113058f5c063937ca8f083e28a7584d2c" Oct 03 16:16:00 crc kubenswrapper[5081]: I1003 16:16:00.647310 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:16:00 crc kubenswrapper[5081]: I1003 16:16:00.648811 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:16:30 crc kubenswrapper[5081]: I1003 16:16:30.648107 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:16:30 crc kubenswrapper[5081]: I1003 16:16:30.649162 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:17:00 crc kubenswrapper[5081]: I1003 16:17:00.647131 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:17:00 crc kubenswrapper[5081]: I1003 16:17:00.647756 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:17:00 crc kubenswrapper[5081]: I1003 16:17:00.647809 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:17:00 crc kubenswrapper[5081]: I1003 16:17:00.648480 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:17:00 crc kubenswrapper[5081]: I1003 16:17:00.648535 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" gracePeriod=600 Oct 03 16:17:00 crc kubenswrapper[5081]: E1003 16:17:00.773437 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:17:01 crc kubenswrapper[5081]: I1003 16:17:01.316074 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" exitCode=0 Oct 03 16:17:01 crc kubenswrapper[5081]: I1003 16:17:01.316185 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030"} Oct 03 16:17:01 crc kubenswrapper[5081]: I1003 16:17:01.316682 5081 scope.go:117] "RemoveContainer" containerID="00aef399bf7092dfab6bb4297a80061dee0999739a29b195a2c12e4c13143a30" Oct 03 16:17:01 crc kubenswrapper[5081]: I1003 16:17:01.317428 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:17:01 crc kubenswrapper[5081]: E1003 16:17:01.317806 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:17:14 crc kubenswrapper[5081]: I1003 16:17:14.828034 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:17:14 crc kubenswrapper[5081]: E1003 16:17:14.829134 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:17:25 crc kubenswrapper[5081]: I1003 16:17:25.827844 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:17:25 crc kubenswrapper[5081]: E1003 16:17:25.828811 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:17:37 crc kubenswrapper[5081]: I1003 16:17:37.828173 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:17:37 crc kubenswrapper[5081]: E1003 16:17:37.829118 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:17:52 crc kubenswrapper[5081]: I1003 16:17:52.828307 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:17:52 crc kubenswrapper[5081]: E1003 16:17:52.829274 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:18:04 crc kubenswrapper[5081]: I1003 16:18:04.827943 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:18:04 crc kubenswrapper[5081]: E1003 16:18:04.829639 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:18:16 crc kubenswrapper[5081]: I1003 16:18:16.827510 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:18:16 crc kubenswrapper[5081]: E1003 16:18:16.828707 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:18:30 crc kubenswrapper[5081]: I1003 16:18:30.828544 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:18:30 crc kubenswrapper[5081]: E1003 16:18:30.829773 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:18:41 crc kubenswrapper[5081]: I1003 16:18:41.835234 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:18:41 crc kubenswrapper[5081]: E1003 16:18:41.837491 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:18:53 crc kubenswrapper[5081]: I1003 16:18:53.827928 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:18:53 crc kubenswrapper[5081]: E1003 16:18:53.829241 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:19:07 crc kubenswrapper[5081]: I1003 16:19:07.828187 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:19:07 crc kubenswrapper[5081]: E1003 16:19:07.829220 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:19:18 crc kubenswrapper[5081]: I1003 16:19:18.828211 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:19:18 crc kubenswrapper[5081]: E1003 16:19:18.829338 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:19:30 crc kubenswrapper[5081]: I1003 16:19:30.827921 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:19:30 crc kubenswrapper[5081]: E1003 16:19:30.829481 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:19:45 crc kubenswrapper[5081]: I1003 16:19:45.828123 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:19:45 crc kubenswrapper[5081]: E1003 16:19:45.830168 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:19:59 crc kubenswrapper[5081]: I1003 16:19:59.828102 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:19:59 crc kubenswrapper[5081]: E1003 16:19:59.829261 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:20:12 crc kubenswrapper[5081]: I1003 16:20:12.828063 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:20:12 crc kubenswrapper[5081]: E1003 16:20:12.829122 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:20:24 crc kubenswrapper[5081]: I1003 16:20:24.828694 5081 scope.go:117] "RemoveContainer" 
containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:20:24 crc kubenswrapper[5081]: E1003 16:20:24.829714 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:20:38 crc kubenswrapper[5081]: I1003 16:20:38.827521 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:20:38 crc kubenswrapper[5081]: E1003 16:20:38.828381 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:20:51 crc kubenswrapper[5081]: I1003 16:20:51.832265 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:20:51 crc kubenswrapper[5081]: E1003 16:20:51.833107 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:21:05 crc kubenswrapper[5081]: I1003 16:21:05.827643 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:21:05 crc kubenswrapper[5081]: E1003 16:21:05.828830 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:21:20 crc kubenswrapper[5081]: I1003 16:21:20.828207 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:21:20 crc kubenswrapper[5081]: E1003 16:21:20.830040 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.471846 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:21 crc kubenswrapper[5081]: E1003 16:21:21.472218 5081 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="c534ff6f-e775-430d-a9e8-9696361bb3f9" containerName="collect-profiles" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.472234 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c534ff6f-e775-430d-a9e8-9696361bb3f9" containerName="collect-profiles" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.472397 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c534ff6f-e775-430d-a9e8-9696361bb3f9" containerName="collect-profiles" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.473466 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.495106 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.554803 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.554902 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv6zd\" (UniqueName: \"kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.555109 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.656408 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv6zd\" (UniqueName: \"kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.656482 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.656544 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.657662 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities\") pod \"redhat-marketplace-xrvlx\" 
(UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.658304 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.680237 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv6zd\" (UniqueName: \"kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd\") pod \"redhat-marketplace-xrvlx\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:21 crc kubenswrapper[5081]: I1003 16:21:21.798655 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:22 crc kubenswrapper[5081]: I1003 16:21:22.246134 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:22 crc kubenswrapper[5081]: I1003 16:21:22.563089 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerID="05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf" exitCode=0 Oct 03 16:21:22 crc kubenswrapper[5081]: I1003 16:21:22.563219 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerDied","Data":"05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf"} Oct 03 16:21:22 crc kubenswrapper[5081]: I1003 16:21:22.563860 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerStarted","Data":"b6aed6b88fa7a3667c270a34595af6f4a3b270f273ea7298c8fab71e5b6f1895"} Oct 03 16:21:22 crc kubenswrapper[5081]: I1003 16:21:22.565590 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:21:24 crc kubenswrapper[5081]: E1003 16:21:24.147346 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04ebcd3a_3b32_4f47_835c_e7644096a894.slice/crio-ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc.scope\": RecentStats: unable to find data in memory cache]" Oct 03 16:21:24 crc kubenswrapper[5081]: I1003 16:21:24.583293 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerID="ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc" exitCode=0 Oct 03 16:21:24 crc kubenswrapper[5081]: I1003 16:21:24.583354 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerDied","Data":"ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc"} Oct 03 16:21:25 crc kubenswrapper[5081]: I1003 16:21:25.592323 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" 
event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerStarted","Data":"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e"} Oct 03 16:21:25 crc kubenswrapper[5081]: I1003 16:21:25.612016 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xrvlx" podStartSLOduration=2.058423374 podStartE2EDuration="4.611986599s" podCreationTimestamp="2025-10-03 16:21:21 +0000 UTC" firstStartedPulling="2025-10-03 16:21:22.565292931 +0000 UTC m=+3201.530849544" lastFinishedPulling="2025-10-03 16:21:25.118856156 +0000 UTC m=+3204.084412769" observedRunningTime="2025-10-03 16:21:25.610679551 +0000 UTC m=+3204.576236174" watchObservedRunningTime="2025-10-03 16:21:25.611986599 +0000 UTC m=+3204.577543212" Oct 03 16:21:31 crc kubenswrapper[5081]: I1003 16:21:31.799337 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:31 crc kubenswrapper[5081]: I1003 16:21:31.799974 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:31 crc kubenswrapper[5081]: I1003 16:21:31.847926 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:32 crc kubenswrapper[5081]: I1003 16:21:32.686124 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:32 crc kubenswrapper[5081]: I1003 16:21:32.746208 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:34 crc kubenswrapper[5081]: I1003 16:21:34.665715 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xrvlx" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="registry-server" containerID="cri-o://c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e" gracePeriod=2 Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.151607 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.186949 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv6zd\" (UniqueName: \"kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd\") pod \"04ebcd3a-3b32-4f47-835c-e7644096a894\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.187215 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content\") pod \"04ebcd3a-3b32-4f47-835c-e7644096a894\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.187243 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities\") pod \"04ebcd3a-3b32-4f47-835c-e7644096a894\" (UID: \"04ebcd3a-3b32-4f47-835c-e7644096a894\") " Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.188553 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities" (OuterVolumeSpecName: "utilities") pod "04ebcd3a-3b32-4f47-835c-e7644096a894" (UID: "04ebcd3a-3b32-4f47-835c-e7644096a894"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.195194 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd" (OuterVolumeSpecName: "kube-api-access-qv6zd") pod "04ebcd3a-3b32-4f47-835c-e7644096a894" (UID: "04ebcd3a-3b32-4f47-835c-e7644096a894"). InnerVolumeSpecName "kube-api-access-qv6zd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.205498 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04ebcd3a-3b32-4f47-835c-e7644096a894" (UID: "04ebcd3a-3b32-4f47-835c-e7644096a894"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.288741 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.288791 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ebcd3a-3b32-4f47-835c-e7644096a894-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.288805 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv6zd\" (UniqueName: \"kubernetes.io/projected/04ebcd3a-3b32-4f47-835c-e7644096a894-kube-api-access-qv6zd\") on node \"crc\" DevicePath \"\"" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.678180 5081 generic.go:334] "Generic (PLEG): container finished" podID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerID="c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e" exitCode=0 Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.678254 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerDied","Data":"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e"} Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.678804 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrvlx" event={"ID":"04ebcd3a-3b32-4f47-835c-e7644096a894","Type":"ContainerDied","Data":"b6aed6b88fa7a3667c270a34595af6f4a3b270f273ea7298c8fab71e5b6f1895"} Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.678858 5081 scope.go:117] "RemoveContainer" containerID="c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.678292 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrvlx" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.706976 5081 scope.go:117] "RemoveContainer" containerID="ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.724271 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.731056 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrvlx"] Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.753735 5081 scope.go:117] "RemoveContainer" containerID="05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.779182 5081 scope.go:117] "RemoveContainer" containerID="c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e" Oct 03 16:21:35 crc kubenswrapper[5081]: E1003 16:21:35.780008 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e\": container with ID starting with c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e not found: ID does not exist" containerID="c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.780071 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e"} err="failed to get container status \"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e\": rpc error: code = NotFound desc = could not find container \"c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e\": container with ID starting with c6a2a2e11fe57931108f2c8ac28752098821ce3f71b0b49fe81af3714902618e not found: ID does not exist" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.780108 5081 scope.go:117] "RemoveContainer" containerID="ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc" Oct 03 16:21:35 crc kubenswrapper[5081]: E1003 16:21:35.780918 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc\": container with ID starting with ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc not found: ID does not exist" containerID="ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.780989 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc"} err="failed to get container status \"ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc\": rpc error: code = NotFound desc = could not find container \"ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc\": container with ID starting with ee716da9bc560f682876bcbf545cdf2f86a45a22b729e3d6d342c409044118fc not found: ID does not exist" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.781024 5081 scope.go:117] "RemoveContainer" containerID="05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf" Oct 03 16:21:35 crc kubenswrapper[5081]: E1003 16:21:35.781470 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf\": container with ID starting with 05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf not found: ID does not exist" containerID="05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.781513 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf"} err="failed to get container status \"05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf\": rpc error: code = NotFound desc = could not find container \"05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf\": container with ID starting with 05cf42b9f22bab520d0e62450626ffab92e47189c3554cf4cc2bbd8a065980cf not found: ID does not exist" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.827609 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:21:35 crc kubenswrapper[5081]: E1003 16:21:35.827948 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:21:35 crc kubenswrapper[5081]: I1003 16:21:35.841198 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" path="/var/lib/kubelet/pods/04ebcd3a-3b32-4f47-835c-e7644096a894/volumes" Oct 03 16:21:49 crc kubenswrapper[5081]: I1003 16:21:49.828646 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:21:49 crc kubenswrapper[5081]: E1003 16:21:49.829734 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:22:03 crc kubenswrapper[5081]: I1003 16:22:03.827879 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:22:04 crc kubenswrapper[5081]: I1003 16:22:04.942892 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4"} Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.956966 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:22:53 crc kubenswrapper[5081]: E1003 16:22:53.960261 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="extract-content" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.960363 5081 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="extract-content" Oct 03 16:22:53 crc kubenswrapper[5081]: E1003 16:22:53.960459 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="extract-utilities" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.960546 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="extract-utilities" Oct 03 16:22:53 crc kubenswrapper[5081]: E1003 16:22:53.960729 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="registry-server" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.960795 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="registry-server" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.961171 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ebcd3a-3b32-4f47-835c-e7644096a894" containerName="registry-server" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.963252 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:53 crc kubenswrapper[5081]: I1003 16:22:53.971824 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.124069 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rr7v\" (UniqueName: \"kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.124224 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.124314 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.225465 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.225949 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rr7v\" (UniqueName: \"kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.226128 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.226260 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.226673 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.247510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rr7v\" (UniqueName: \"kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v\") pod \"redhat-operators-p8vwv\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.350244 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:22:54 crc kubenswrapper[5081]: I1003 16:22:54.619359 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:22:55 crc kubenswrapper[5081]: I1003 16:22:55.339498 5081 generic.go:334] "Generic (PLEG): container finished" podID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerID="23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267" exitCode=0 Oct 03 16:22:55 crc kubenswrapper[5081]: I1003 16:22:55.339616 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerDied","Data":"23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267"} Oct 03 16:22:55 crc kubenswrapper[5081]: I1003 16:22:55.339878 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerStarted","Data":"ac8bcae9e732132a6675fa8aff98ae0f2407d5557ee15c95abfaa98df9ad7079"} Oct 03 16:22:57 crc kubenswrapper[5081]: I1003 16:22:57.360031 5081 generic.go:334] "Generic (PLEG): container finished" podID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerID="1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c" exitCode=0 Oct 03 16:22:57 crc kubenswrapper[5081]: I1003 16:22:57.360455 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerDied","Data":"1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c"} Oct 03 16:22:58 crc kubenswrapper[5081]: I1003 16:22:58.373279 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" 
event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerStarted","Data":"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83"} Oct 03 16:22:58 crc kubenswrapper[5081]: I1003 16:22:58.395587 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p8vwv" podStartSLOduration=2.89239919 podStartE2EDuration="5.395553032s" podCreationTimestamp="2025-10-03 16:22:53 +0000 UTC" firstStartedPulling="2025-10-03 16:22:55.3416321 +0000 UTC m=+3294.307188713" lastFinishedPulling="2025-10-03 16:22:57.844785942 +0000 UTC m=+3296.810342555" observedRunningTime="2025-10-03 16:22:58.393900354 +0000 UTC m=+3297.359456967" watchObservedRunningTime="2025-10-03 16:22:58.395553032 +0000 UTC m=+3297.361109645" Oct 03 16:23:04 crc kubenswrapper[5081]: I1003 16:23:04.351215 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:04 crc kubenswrapper[5081]: I1003 16:23:04.352764 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:04 crc kubenswrapper[5081]: I1003 16:23:04.400116 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:04 crc kubenswrapper[5081]: I1003 16:23:04.500873 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:04 crc kubenswrapper[5081]: I1003 16:23:04.655409 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.454173 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p8vwv" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="registry-server" containerID="cri-o://7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83" gracePeriod=2 Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.897020 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.954695 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content\") pod \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.954917 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities\") pod \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.954941 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rr7v\" (UniqueName: \"kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v\") pod \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\" (UID: \"d7f1a1ff-1ed9-4af5-8c03-324139a8952f\") " Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.955694 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities" (OuterVolumeSpecName: "utilities") pod "d7f1a1ff-1ed9-4af5-8c03-324139a8952f" (UID: "d7f1a1ff-1ed9-4af5-8c03-324139a8952f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:23:06 crc kubenswrapper[5081]: I1003 16:23:06.961643 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v" (OuterVolumeSpecName: "kube-api-access-5rr7v") pod "d7f1a1ff-1ed9-4af5-8c03-324139a8952f" (UID: "d7f1a1ff-1ed9-4af5-8c03-324139a8952f"). InnerVolumeSpecName "kube-api-access-5rr7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.047613 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7f1a1ff-1ed9-4af5-8c03-324139a8952f" (UID: "d7f1a1ff-1ed9-4af5-8c03-324139a8952f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.057283 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.057317 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.057328 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rr7v\" (UniqueName: \"kubernetes.io/projected/d7f1a1ff-1ed9-4af5-8c03-324139a8952f-kube-api-access-5rr7v\") on node \"crc\" DevicePath \"\"" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.468501 5081 generic.go:334] "Generic (PLEG): container finished" podID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerID="7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83" exitCode=0 Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.468613 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerDied","Data":"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83"} Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.469175 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8vwv" event={"ID":"d7f1a1ff-1ed9-4af5-8c03-324139a8952f","Type":"ContainerDied","Data":"ac8bcae9e732132a6675fa8aff98ae0f2407d5557ee15c95abfaa98df9ad7079"} Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.469205 5081 scope.go:117] "RemoveContainer" containerID="7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.468666 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8vwv" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.507515 5081 scope.go:117] "RemoveContainer" containerID="1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.521538 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.527805 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p8vwv"] Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.541404 5081 scope.go:117] "RemoveContainer" containerID="23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.565889 5081 scope.go:117] "RemoveContainer" containerID="7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83" Oct 03 16:23:07 crc kubenswrapper[5081]: E1003 16:23:07.566606 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83\": container with ID starting with 7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83 not found: ID does not exist" containerID="7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.566677 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83"} err="failed to get container status \"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83\": rpc error: code = NotFound desc = could not find container \"7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83\": container with ID starting with 7119085821fdbd0e8afb68168ab6142888d1ea356f2f176a7b37dec104307f83 not found: ID does not exist" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.566713 5081 scope.go:117] "RemoveContainer" containerID="1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c" Oct 03 16:23:07 crc kubenswrapper[5081]: E1003 16:23:07.567310 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c\": container with ID starting with 1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c not found: ID does not exist" containerID="1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.567372 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c"} err="failed to get container status \"1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c\": rpc error: code = NotFound desc = could not find container \"1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c\": container with ID starting with 1e81adb1586a864737588b6b4ebdc5635012d22e324892aeefa26d890016ab1c not found: ID does not exist" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.567410 5081 scope.go:117] "RemoveContainer" containerID="23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267" Oct 03 16:23:07 crc kubenswrapper[5081]: E1003 16:23:07.567792 5081 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267\": container with ID starting with 23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267 not found: ID does not exist" containerID="23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.567827 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267"} err="failed to get container status \"23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267\": rpc error: code = NotFound desc = could not find container \"23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267\": container with ID starting with 23012d483cb950e02af0f7080ce9f90ad6a2506740769eb7fa3d5df579aa0267 not found: ID does not exist" Oct 03 16:23:07 crc kubenswrapper[5081]: I1003 16:23:07.844627 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" path="/var/lib/kubelet/pods/d7f1a1ff-1ed9-4af5-8c03-324139a8952f/volumes" Oct 03 16:24:30 crc kubenswrapper[5081]: I1003 16:24:30.648057 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:24:30 crc kubenswrapper[5081]: I1003 16:24:30.648646 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.963993 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xzxbn"] Oct 03 16:24:46 crc kubenswrapper[5081]: E1003 16:24:46.966080 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="extract-content" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.966108 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="extract-content" Oct 03 16:24:46 crc kubenswrapper[5081]: E1003 16:24:46.966127 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="registry-server" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.966138 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="registry-server" Oct 03 16:24:46 crc kubenswrapper[5081]: E1003 16:24:46.966155 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="extract-utilities" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.966163 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="extract-utilities" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.966375 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f1a1ff-1ed9-4af5-8c03-324139a8952f" containerName="registry-server" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 
16:24:46.968159 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:46 crc kubenswrapper[5081]: I1003 16:24:46.972089 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xzxbn"] Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.002927 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.003178 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9f44\" (UniqueName: \"kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.003244 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.104982 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.105142 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9f44\" (UniqueName: \"kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.105186 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.105728 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.105884 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc 
kubenswrapper[5081]: I1003 16:24:47.127256 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9f44\" (UniqueName: \"kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44\") pod \"community-operators-xzxbn\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") " pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.299110 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzxbn" Oct 03 16:24:47 crc kubenswrapper[5081]: I1003 16:24:47.798444 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xzxbn"] Oct 03 16:24:48 crc kubenswrapper[5081]: I1003 16:24:48.347123 5081 generic.go:334] "Generic (PLEG): container finished" podID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerID="3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088" exitCode=0 Oct 03 16:24:48 crc kubenswrapper[5081]: I1003 16:24:48.347274 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerDied","Data":"3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088"} Oct 03 16:24:48 crc kubenswrapper[5081]: I1003 16:24:48.347590 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerStarted","Data":"6ca4d50bc4b1afc4c7de5c9582b44e1f69ee8bd21746b6710fb366c46b862851"} Oct 03 16:24:49 crc kubenswrapper[5081]: I1003 16:24:49.355396 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerStarted","Data":"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"} Oct 03 16:24:50 crc kubenswrapper[5081]: I1003 16:24:50.386293 5081 generic.go:334] "Generic (PLEG): container finished" podID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerID="1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac" exitCode=0 Oct 03 16:24:50 crc kubenswrapper[5081]: I1003 16:24:50.386358 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerDied","Data":"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"} Oct 03 16:24:51 crc kubenswrapper[5081]: I1003 16:24:51.404042 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerStarted","Data":"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"} Oct 03 16:24:51 crc kubenswrapper[5081]: I1003 16:24:51.429329 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xzxbn" podStartSLOduration=3.014813372 podStartE2EDuration="5.429307932s" podCreationTimestamp="2025-10-03 16:24:46 +0000 UTC" firstStartedPulling="2025-10-03 16:24:48.349841599 +0000 UTC m=+3407.315398212" lastFinishedPulling="2025-10-03 16:24:50.764336159 +0000 UTC m=+3409.729892772" observedRunningTime="2025-10-03 16:24:51.424677859 +0000 UTC m=+3410.390234482" watchObservedRunningTime="2025-10-03 16:24:51.429307932 +0000 UTC m=+3410.394864535" Oct 03 16:24:57 crc kubenswrapper[5081]: I1003 
16:24:57.300032 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:24:57 crc kubenswrapper[5081]: I1003 16:24:57.300814 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:24:57 crc kubenswrapper[5081]: I1003 16:24:57.347554 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:24:57 crc kubenswrapper[5081]: I1003 16:24:57.508457 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:24:57 crc kubenswrapper[5081]: I1003 16:24:57.584401 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xzxbn"]
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.461406 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xzxbn" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="registry-server" containerID="cri-o://08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd" gracePeriod=2
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.852661 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.908034 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9f44\" (UniqueName: \"kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44\") pod \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") "
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.908160 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content\") pod \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") "
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.908278 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities\") pod \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\" (UID: \"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9\") "
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.909398 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities" (OuterVolumeSpecName: "utilities") pod "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" (UID: "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.912993 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44" (OuterVolumeSpecName: "kube-api-access-c9f44") pod "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" (UID: "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9"). InnerVolumeSpecName "kube-api-access-c9f44". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:24:59 crc kubenswrapper[5081]: I1003 16:24:59.956706 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" (UID: "dffd535a-f1cd-4577-8ea3-7eff3eeeebe9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.010504 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.010545 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.010608 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9f44\" (UniqueName: \"kubernetes.io/projected/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9-kube-api-access-c9f44\") on node \"crc\" DevicePath \"\""
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.471348 5081 generic.go:334] "Generic (PLEG): container finished" podID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerID="08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd" exitCode=0
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.471399 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerDied","Data":"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"}
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.471410 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzxbn"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.471430 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzxbn" event={"ID":"dffd535a-f1cd-4577-8ea3-7eff3eeeebe9","Type":"ContainerDied","Data":"6ca4d50bc4b1afc4c7de5c9582b44e1f69ee8bd21746b6710fb366c46b862851"}
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.471450 5081 scope.go:117] "RemoveContainer" containerID="08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.488836 5081 scope.go:117] "RemoveContainer" containerID="1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.528045 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xzxbn"]
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.533468 5081 scope.go:117] "RemoveContainer" containerID="3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.534148 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xzxbn"]
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.556952 5081 scope.go:117] "RemoveContainer" containerID="08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"
Oct 03 16:25:00 crc kubenswrapper[5081]: E1003 16:25:00.557636 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd\": container with ID starting with 08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd not found: ID does not exist" containerID="08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.557689 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd"} err="failed to get container status \"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd\": rpc error: code = NotFound desc = could not find container \"08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd\": container with ID starting with 08cfb0d432f8356f75d61dc5d62e11f98bee2fccc02d2b97e17af6b4e9e36cbd not found: ID does not exist"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.557727 5081 scope.go:117] "RemoveContainer" containerID="1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"
Oct 03 16:25:00 crc kubenswrapper[5081]: E1003 16:25:00.558155 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac\": container with ID starting with 1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac not found: ID does not exist" containerID="1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"
Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.558175 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac"} err="failed to get container status \"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac\": rpc error: code = NotFound desc = could not find container \"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac\": container with ID starting with 1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac not found: ID does not exist"
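The "DeleteContainer returned error ... NotFound" records above are benign: the kubelet retries container removal during pod cleanup, and a container that is already gone satisfies the desired state. A minimal Go sketch of that idempotent-cleanup pattern follows; the fakeRuntime type and errNotFound sentinel are assumptions for the example, not kubelet or CRI-O types.

```go
// Sketch: treat "NotFound" from a container runtime as successful deletion,
// mirroring the tolerated DeleteContainer errors in the log above.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("NotFound: ID does not exist")

// fakeRuntime stands in for the CRI runtime; removing an unknown ID fails
// with errNotFound, much as CRI-O answers with rpc code = NotFound.
type fakeRuntime struct{ containers map[string]bool }

func (r *fakeRuntime) RemoveContainer(id string) error {
	if !r.containers[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(r.containers, id)
	return nil
}

// removeIfPresent treats NotFound as success: the desired state ("container
// gone") already holds, so a repeated deletion attempt must not fail the sync.
func removeIfPresent(r *fakeRuntime, id string) error {
	if err := r.RemoveContainer(id); err != nil && !errors.Is(err, errNotFound) {
		return err
	}
	return nil
}

func main() {
	r := &fakeRuntime{containers: map[string]bool{"08cfb0d4": true}}
	fmt.Println(removeIfPresent(r, "08cfb0d4")) // <nil>: actually removed
	fmt.Println(removeIfPresent(r, "08cfb0d4")) // <nil>: already gone, tolerated
}
```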
container \"1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac\": container with ID starting with 1bfa7411e271e0c582a46de452966f17b4e365ddd9f933b853a2053db04638ac not found: ID does not exist" Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.558187 5081 scope.go:117] "RemoveContainer" containerID="3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088" Oct 03 16:25:00 crc kubenswrapper[5081]: E1003 16:25:00.558463 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088\": container with ID starting with 3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088 not found: ID does not exist" containerID="3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088" Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.558481 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088"} err="failed to get container status \"3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088\": rpc error: code = NotFound desc = could not find container \"3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088\": container with ID starting with 3fb1cdc5ea804b2b299e1600ad8f8d27e6317a256a1826b8e00fa15b8af74088 not found: ID does not exist" Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.647083 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:25:00 crc kubenswrapper[5081]: I1003 16:25:00.647149 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:25:01 crc kubenswrapper[5081]: I1003 16:25:01.838787 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" path="/var/lib/kubelet/pods/dffd535a-f1cd-4577-8ea3-7eff3eeeebe9/volumes" Oct 03 16:25:30 crc kubenswrapper[5081]: I1003 16:25:30.647435 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:25:30 crc kubenswrapper[5081]: I1003 16:25:30.648052 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:25:30 crc kubenswrapper[5081]: I1003 16:25:30.648108 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:25:30 crc kubenswrapper[5081]: I1003 16:25:30.648763 5081 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:25:30 crc kubenswrapper[5081]: I1003 16:25:30.648820 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4" gracePeriod=600 Oct 03 16:25:31 crc kubenswrapper[5081]: I1003 16:25:31.724278 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4" exitCode=0 Oct 03 16:25:31 crc kubenswrapper[5081]: I1003 16:25:31.724363 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4"} Oct 03 16:25:31 crc kubenswrapper[5081]: I1003 16:25:31.724628 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6"} Oct 03 16:25:31 crc kubenswrapper[5081]: I1003 16:25:31.724653 5081 scope.go:117] "RemoveContainer" containerID="5a8c4461219b61705430ec9833e18132ed4032711f122b21f933fcd613f63030" Oct 03 16:27:30 crc kubenswrapper[5081]: I1003 16:27:30.647676 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:27:30 crc kubenswrapper[5081]: I1003 16:27:30.648217 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:28:00 crc kubenswrapper[5081]: I1003 16:28:00.648094 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:28:00 crc kubenswrapper[5081]: I1003 16:28:00.648696 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:28:30 crc kubenswrapper[5081]: I1003 16:28:30.648074 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:28:30 crc kubenswrapper[5081]: I1003 16:28:30.648713 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:28:30 crc kubenswrapper[5081]: I1003 16:28:30.648776 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:28:30 crc kubenswrapper[5081]: I1003 16:28:30.649377 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:28:30 crc kubenswrapper[5081]: I1003 16:28:30.649477 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" gracePeriod=600 Oct 03 16:28:30 crc kubenswrapper[5081]: E1003 16:28:30.769175 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:28:31 crc kubenswrapper[5081]: I1003 16:28:31.056612 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" exitCode=0 Oct 03 16:28:31 crc kubenswrapper[5081]: I1003 16:28:31.056688 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6"} Oct 03 16:28:31 crc kubenswrapper[5081]: I1003 16:28:31.056921 5081 scope.go:117] "RemoveContainer" containerID="7aa93fdf8651c00cb8cd3adf1c5bba9a97c18ff95e1bea35fb2897cf8cc080c4" Oct 03 16:28:31 crc kubenswrapper[5081]: I1003 16:28:31.057590 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:28:31 crc kubenswrapper[5081]: E1003 16:28:31.057989 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:28:42 crc kubenswrapper[5081]: I1003 16:28:42.830152 5081 scope.go:117] 
"RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:28:42 crc kubenswrapper[5081]: E1003 16:28:42.831308 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:28:42 crc kubenswrapper[5081]: E1003 16:28:42.875973 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Oct 03 16:28:57 crc kubenswrapper[5081]: I1003 16:28:57.828013 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:28:57 crc kubenswrapper[5081]: E1003 16:28:57.829342 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:29:12 crc kubenswrapper[5081]: I1003 16:29:12.828791 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:29:12 crc kubenswrapper[5081]: E1003 16:29:12.829667 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:29:25 crc kubenswrapper[5081]: I1003 16:29:25.828018 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:29:25 crc kubenswrapper[5081]: E1003 16:29:25.828986 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:29:40 crc kubenswrapper[5081]: I1003 16:29:40.828552 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:29:40 crc kubenswrapper[5081]: E1003 16:29:40.829772 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.184380 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"]
Oct 03 16:30:00 crc kubenswrapper[5081]: E1003 16:30:00.185273 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="extract-content"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.185289 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="extract-content"
Oct 03 16:30:00 crc kubenswrapper[5081]: E1003 16:30:00.185302 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="registry-server"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.185310 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="registry-server"
Oct 03 16:30:00 crc kubenswrapper[5081]: E1003 16:30:00.185316 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="extract-utilities"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.185323 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="extract-utilities"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.185462 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dffd535a-f1cd-4577-8ea3-7eff3eeeebe9" containerName="registry-server"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.186289 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.191268 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.193089 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.199073 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"]
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.301771 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9nsf\" (UniqueName: \"kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.301831 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.301878 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.403194 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9nsf\" (UniqueName: \"kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.403262 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.403296 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.406441 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.409614 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.424303 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9nsf\" (UniqueName: \"kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf\") pod \"collect-profiles-29325150-sl8zc\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.508147 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:00 crc kubenswrapper[5081]: I1003 16:30:00.948506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"]
Oct 03 16:30:01 crc kubenswrapper[5081]: I1003 16:30:01.735460 5081 generic.go:334] "Generic (PLEG): container finished" podID="d4dd4268-3d1d-42ee-abb7-e03624082842" containerID="93cddce6d79b185c4d62099eaefbc46316033a9853d46ee9c735461e460a29c1" exitCode=0
Oct 03 16:30:01 crc kubenswrapper[5081]: I1003 16:30:01.735602 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc" event={"ID":"d4dd4268-3d1d-42ee-abb7-e03624082842","Type":"ContainerDied","Data":"93cddce6d79b185c4d62099eaefbc46316033a9853d46ee9c735461e460a29c1"}
Oct 03 16:30:01 crc kubenswrapper[5081]: I1003 16:30:01.736921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc" event={"ID":"d4dd4268-3d1d-42ee-abb7-e03624082842","Type":"ContainerStarted","Data":"c00dd57f09f81f408f00814b9e9a38fc4c50354388595e9d3cb4cac801b92b48"}
Oct 03 16:30:02 crc kubenswrapper[5081]: I1003 16:30:02.991362 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.141472 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9nsf\" (UniqueName: \"kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf\") pod \"d4dd4268-3d1d-42ee-abb7-e03624082842\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") "
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.141531 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume\") pod \"d4dd4268-3d1d-42ee-abb7-e03624082842\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") "
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.141623 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume\") pod \"d4dd4268-3d1d-42ee-abb7-e03624082842\" (UID: \"d4dd4268-3d1d-42ee-abb7-e03624082842\") "
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.142245 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume" (OuterVolumeSpecName: "config-volume") pod "d4dd4268-3d1d-42ee-abb7-e03624082842" (UID: "d4dd4268-3d1d-42ee-abb7-e03624082842"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.146772 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d4dd4268-3d1d-42ee-abb7-e03624082842" (UID: "d4dd4268-3d1d-42ee-abb7-e03624082842"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.146924 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf" (OuterVolumeSpecName: "kube-api-access-c9nsf") pod "d4dd4268-3d1d-42ee-abb7-e03624082842" (UID: "d4dd4268-3d1d-42ee-abb7-e03624082842"). InnerVolumeSpecName "kube-api-access-c9nsf". PluginName "kubernetes.io/projected", VolumeGidValue ""
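The reconciler_common records above (VerifyControllerAttachedVolume/MountVolume on pod creation, UnmountVolume on deletion) come from kubelet's volume reconciler, which repeatedly diffs the desired state (volumes pods still need) against the actual state (volumes currently mounted) and emits mount or unmount operations for the difference. A set-difference sketch of that idea with plain maps, not the real reconciler's state machine:

```go
// Sketch: the desired-vs-actual diff behind MountVolume/UnmountVolume.
package main

import "fmt"

func diff(desired, actual map[string]bool) (toMount, toUnmount []string) {
	for v := range desired {
		if !actual[v] {
			toMount = append(toMount, v) // wanted but not mounted yet
		}
	}
	for v := range actual {
		if !desired[v] {
			toUnmount = append(toUnmount, v) // mounted but no longer wanted
		}
	}
	return
}

func main() {
	// After the collect-profiles pod terminates, nothing desires its volumes
	// any more, so all three flip to UnmountVolume operations, as in the log.
	desired := map[string]bool{}
	actual := map[string]bool{
		"kube-api-access-c9nsf": true,
		"config-volume":         true,
		"secret-volume":         true,
	}
	mount, unmount := diff(desired, actual)
	fmt.Println("mount:", mount, "unmount:", unmount)
}
```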
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.243097 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9nsf\" (UniqueName: \"kubernetes.io/projected/d4dd4268-3d1d-42ee-abb7-e03624082842-kube-api-access-c9nsf\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.243136 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4dd4268-3d1d-42ee-abb7-e03624082842-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.243144 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4dd4268-3d1d-42ee-abb7-e03624082842-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.751486 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc" event={"ID":"d4dd4268-3d1d-42ee-abb7-e03624082842","Type":"ContainerDied","Data":"c00dd57f09f81f408f00814b9e9a38fc4c50354388595e9d3cb4cac801b92b48"} Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.751525 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c00dd57f09f81f408f00814b9e9a38fc4c50354388595e9d3cb4cac801b92b48" Oct 03 16:30:03 crc kubenswrapper[5081]: I1003 16:30:03.751596 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc" Oct 03 16:30:04 crc kubenswrapper[5081]: I1003 16:30:04.058494 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs"] Oct 03 16:30:04 crc kubenswrapper[5081]: I1003 16:30:04.063722 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325105-6t6hs"] Oct 03 16:30:04 crc kubenswrapper[5081]: I1003 16:30:04.827946 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:30:04 crc kubenswrapper[5081]: E1003 16:30:04.828463 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:30:05 crc kubenswrapper[5081]: I1003 16:30:05.838485 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb" path="/var/lib/kubelet/pods/c1c8cdc8-72c5-48c5-97fb-35e1c11d04eb/volumes" Oct 03 16:30:09 crc kubenswrapper[5081]: I1003 16:30:09.955239 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:09 crc kubenswrapper[5081]: E1003 16:30:09.955991 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4dd4268-3d1d-42ee-abb7-e03624082842" containerName="collect-profiles" Oct 03 16:30:09 crc kubenswrapper[5081]: I1003 16:30:09.956007 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4dd4268-3d1d-42ee-abb7-e03624082842" containerName="collect-profiles" Oct 03 16:30:09 crc 
kubenswrapper[5081]: I1003 16:30:09.956210 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4dd4268-3d1d-42ee-abb7-e03624082842" containerName="collect-profiles" Oct 03 16:30:09 crc kubenswrapper[5081]: I1003 16:30:09.957636 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:09 crc kubenswrapper[5081]: I1003 16:30:09.975472 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.043669 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.043729 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjp8d\" (UniqueName: \"kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.043765 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.145252 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.145583 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjp8d\" (UniqueName: \"kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.145689 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.145936 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.146047 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.167150 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjp8d\" (UniqueName: \"kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d\") pod \"certified-operators-pxqcv\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.295855 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.570251 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.800108 5081 generic.go:334] "Generic (PLEG): container finished" podID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerID="e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc" exitCode=0 Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.800166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerDied","Data":"e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc"} Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.800203 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerStarted","Data":"ebba0458d8c7839a2bd9e1339bdb5d6dc8a0be3e6b1d6ca7ed9cd6788ed2d8a0"} Oct 03 16:30:10 crc kubenswrapper[5081]: I1003 16:30:10.801545 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:30:11 crc kubenswrapper[5081]: I1003 16:30:11.810293 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerStarted","Data":"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f"} Oct 03 16:30:12 crc kubenswrapper[5081]: I1003 16:30:12.820234 5081 generic.go:334] "Generic (PLEG): container finished" podID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerID="934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f" exitCode=0 Oct 03 16:30:12 crc kubenswrapper[5081]: I1003 16:30:12.820311 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerDied","Data":"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f"} Oct 03 16:30:13 crc kubenswrapper[5081]: I1003 16:30:13.846019 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerStarted","Data":"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46"} Oct 03 16:30:13 crc kubenswrapper[5081]: I1003 16:30:13.854083 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pxqcv" podStartSLOduration=2.410534045 podStartE2EDuration="4.854061423s" 
podCreationTimestamp="2025-10-03 16:30:09 +0000 UTC" firstStartedPulling="2025-10-03 16:30:10.801348991 +0000 UTC m=+3729.766905604" lastFinishedPulling="2025-10-03 16:30:13.244876369 +0000 UTC m=+3732.210432982" observedRunningTime="2025-10-03 16:30:13.851795878 +0000 UTC m=+3732.817352481" watchObservedRunningTime="2025-10-03 16:30:13.854061423 +0000 UTC m=+3732.819618026" Oct 03 16:30:17 crc kubenswrapper[5081]: I1003 16:30:17.828208 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:30:17 crc kubenswrapper[5081]: E1003 16:30:17.828942 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:30:20 crc kubenswrapper[5081]: I1003 16:30:20.296334 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:20 crc kubenswrapper[5081]: I1003 16:30:20.296389 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:20 crc kubenswrapper[5081]: I1003 16:30:20.341670 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:20 crc kubenswrapper[5081]: I1003 16:30:20.925301 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:20 crc kubenswrapper[5081]: I1003 16:30:20.972857 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:21 crc kubenswrapper[5081]: I1003 16:30:21.760450 5081 scope.go:117] "RemoveContainer" containerID="8ada1bf2fbde3ad868bd396a3986916612c1804e08ddb3c8c21cc1cf824a1007" Oct 03 16:30:22 crc kubenswrapper[5081]: I1003 16:30:22.894131 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pxqcv" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="registry-server" containerID="cri-o://074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46" gracePeriod=2 Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.268427 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.335475 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities\") pod \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.335529 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content\") pod \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.335665 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjp8d\" (UniqueName: \"kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d\") pod \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\" (UID: \"222233c7-cc6b-4cc1-92c0-c76abf4fab83\") " Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.336396 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities" (OuterVolumeSpecName: "utilities") pod "222233c7-cc6b-4cc1-92c0-c76abf4fab83" (UID: "222233c7-cc6b-4cc1-92c0-c76abf4fab83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.341170 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d" (OuterVolumeSpecName: "kube-api-access-vjp8d") pod "222233c7-cc6b-4cc1-92c0-c76abf4fab83" (UID: "222233c7-cc6b-4cc1-92c0-c76abf4fab83"). InnerVolumeSpecName "kube-api-access-vjp8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.381905 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "222233c7-cc6b-4cc1-92c0-c76abf4fab83" (UID: "222233c7-cc6b-4cc1-92c0-c76abf4fab83"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.437003 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.437031 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/222233c7-cc6b-4cc1-92c0-c76abf4fab83-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.437042 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjp8d\" (UniqueName: \"kubernetes.io/projected/222233c7-cc6b-4cc1-92c0-c76abf4fab83-kube-api-access-vjp8d\") on node \"crc\" DevicePath \"\"" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.906299 5081 generic.go:334] "Generic (PLEG): container finished" podID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerID="074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46" exitCode=0 Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.906368 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerDied","Data":"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46"} Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.906728 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxqcv" event={"ID":"222233c7-cc6b-4cc1-92c0-c76abf4fab83","Type":"ContainerDied","Data":"ebba0458d8c7839a2bd9e1339bdb5d6dc8a0be3e6b1d6ca7ed9cd6788ed2d8a0"} Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.906756 5081 scope.go:117] "RemoveContainer" containerID="074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.906617 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pxqcv" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.933102 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.936016 5081 scope.go:117] "RemoveContainer" containerID="934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.938336 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pxqcv"] Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.954852 5081 scope.go:117] "RemoveContainer" containerID="e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.984684 5081 scope.go:117] "RemoveContainer" containerID="074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46" Oct 03 16:30:23 crc kubenswrapper[5081]: E1003 16:30:23.985214 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46\": container with ID starting with 074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46 not found: ID does not exist" containerID="074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.985255 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46"} err="failed to get container status \"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46\": rpc error: code = NotFound desc = could not find container \"074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46\": container with ID starting with 074b888b73aa98ab9010777d22ce3d2f7b7f97cda7a6ce70b0741121685f6a46 not found: ID does not exist" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.985282 5081 scope.go:117] "RemoveContainer" containerID="934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f" Oct 03 16:30:23 crc kubenswrapper[5081]: E1003 16:30:23.985803 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f\": container with ID starting with 934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f not found: ID does not exist" containerID="934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.985840 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f"} err="failed to get container status \"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f\": rpc error: code = NotFound desc = could not find container \"934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f\": container with ID starting with 934659f032b3e802989f2a8618ed87b448fdb9ec00377159dcef4566f9ff0f7f not found: ID does not exist" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.985881 5081 scope.go:117] "RemoveContainer" containerID="e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc" Oct 03 16:30:23 crc kubenswrapper[5081]: E1003 16:30:23.986145 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc\": container with ID starting with e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc not found: ID does not exist" containerID="e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc" Oct 03 16:30:23 crc kubenswrapper[5081]: I1003 16:30:23.986171 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc"} err="failed to get container status \"e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc\": rpc error: code = NotFound desc = could not find container \"e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc\": container with ID starting with e0b9d31ff8e5f7bbe958dca2eb3b6a3f4f17d7848d519e2ab353687241ffabdc not found: ID does not exist" Oct 03 16:30:25 crc kubenswrapper[5081]: I1003 16:30:25.835894 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" path="/var/lib/kubelet/pods/222233c7-cc6b-4cc1-92c0-c76abf4fab83/volumes" Oct 03 16:30:28 crc kubenswrapper[5081]: I1003 16:30:28.827687 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:30:28 crc kubenswrapper[5081]: E1003 16:30:28.828277 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:30:40 crc kubenswrapper[5081]: I1003 16:30:40.828375 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:30:40 crc kubenswrapper[5081]: E1003 16:30:40.829348 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:30:55 crc kubenswrapper[5081]: I1003 16:30:55.827411 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:30:55 crc kubenswrapper[5081]: E1003 16:30:55.828168 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:31:09 crc kubenswrapper[5081]: I1003 16:31:09.828073 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:31:09 crc kubenswrapper[5081]: E1003 16:31:09.828736 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:31:20 crc kubenswrapper[5081]: I1003 16:31:20.827451 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:31:20 crc kubenswrapper[5081]: E1003 16:31:20.828183 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:31:33 crc kubenswrapper[5081]: I1003 16:31:33.827952 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:31:33 crc kubenswrapper[5081]: E1003 16:31:33.828715 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.280274 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:35 crc kubenswrapper[5081]: E1003 16:31:35.280607 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="extract-utilities" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.280621 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="extract-utilities" Oct 03 16:31:35 crc kubenswrapper[5081]: E1003 16:31:35.280632 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="registry-server" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.280639 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="registry-server" Oct 03 16:31:35 crc kubenswrapper[5081]: E1003 16:31:35.280662 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="extract-content" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.280668 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="extract-content" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.280828 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="222233c7-cc6b-4cc1-92c0-c76abf4fab83" containerName="registry-server" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.281856 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.298237 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lmzz\" (UniqueName: \"kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.298363 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.298422 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.299092 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.399409 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lmzz\" (UniqueName: \"kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.399522 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.399657 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.400058 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.400119 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.425092 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7lmzz\" (UniqueName: \"kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz\") pod \"redhat-marketplace-tplwf\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:35 crc kubenswrapper[5081]: I1003 16:31:35.602929 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:36 crc kubenswrapper[5081]: I1003 16:31:36.022238 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:36 crc kubenswrapper[5081]: W1003 16:31:36.042997 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda3a835e_f782_40b3_b204_1e26eae4700d.slice/crio-2b4facca4f8501ed3cb24d73853155a936bfdf565edbb8bdff2bafb52d585955 WatchSource:0}: Error finding container 2b4facca4f8501ed3cb24d73853155a936bfdf565edbb8bdff2bafb52d585955: Status 404 returned error can't find the container with id 2b4facca4f8501ed3cb24d73853155a936bfdf565edbb8bdff2bafb52d585955 Oct 03 16:31:36 crc kubenswrapper[5081]: I1003 16:31:36.409885 5081 generic.go:334] "Generic (PLEG): container finished" podID="da3a835e-f782-40b3-b204-1e26eae4700d" containerID="8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df" exitCode=0 Oct 03 16:31:36 crc kubenswrapper[5081]: I1003 16:31:36.409960 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerDied","Data":"8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df"} Oct 03 16:31:36 crc kubenswrapper[5081]: I1003 16:31:36.410476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerStarted","Data":"2b4facca4f8501ed3cb24d73853155a936bfdf565edbb8bdff2bafb52d585955"} Oct 03 16:31:38 crc kubenswrapper[5081]: I1003 16:31:38.424904 5081 generic.go:334] "Generic (PLEG): container finished" podID="da3a835e-f782-40b3-b204-1e26eae4700d" containerID="d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44" exitCode=0 Oct 03 16:31:38 crc kubenswrapper[5081]: I1003 16:31:38.425262 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerDied","Data":"d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44"} Oct 03 16:31:39 crc kubenswrapper[5081]: I1003 16:31:39.435627 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerStarted","Data":"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef"} Oct 03 16:31:39 crc kubenswrapper[5081]: I1003 16:31:39.452305 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tplwf" podStartSLOduration=1.9184542420000001 podStartE2EDuration="4.452280933s" podCreationTimestamp="2025-10-03 16:31:35 +0000 UTC" firstStartedPulling="2025-10-03 16:31:36.41277792 +0000 UTC m=+3815.378334533" lastFinishedPulling="2025-10-03 16:31:38.946604611 +0000 UTC m=+3817.912161224" observedRunningTime="2025-10-03 16:31:39.451041497 +0000 UTC m=+3818.416598130" 
watchObservedRunningTime="2025-10-03 16:31:39.452280933 +0000 UTC m=+3818.417837556" Oct 03 16:31:45 crc kubenswrapper[5081]: I1003 16:31:45.604254 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:45 crc kubenswrapper[5081]: I1003 16:31:45.604864 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:45 crc kubenswrapper[5081]: I1003 16:31:45.640903 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:46 crc kubenswrapper[5081]: I1003 16:31:46.518406 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:46 crc kubenswrapper[5081]: I1003 16:31:46.562001 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:47 crc kubenswrapper[5081]: I1003 16:31:47.828100 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:31:47 crc kubenswrapper[5081]: E1003 16:31:47.828553 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.496899 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tplwf" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="registry-server" containerID="cri-o://1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef" gracePeriod=2 Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.869877 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.982541 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content\") pod \"da3a835e-f782-40b3-b204-1e26eae4700d\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.983746 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lmzz\" (UniqueName: \"kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz\") pod \"da3a835e-f782-40b3-b204-1e26eae4700d\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.984130 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities\") pod \"da3a835e-f782-40b3-b204-1e26eae4700d\" (UID: \"da3a835e-f782-40b3-b204-1e26eae4700d\") " Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.985059 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities" (OuterVolumeSpecName: "utilities") pod "da3a835e-f782-40b3-b204-1e26eae4700d" (UID: "da3a835e-f782-40b3-b204-1e26eae4700d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.994018 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz" (OuterVolumeSpecName: "kube-api-access-7lmzz") pod "da3a835e-f782-40b3-b204-1e26eae4700d" (UID: "da3a835e-f782-40b3-b204-1e26eae4700d"). InnerVolumeSpecName "kube-api-access-7lmzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:31:48 crc kubenswrapper[5081]: I1003 16:31:48.996051 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da3a835e-f782-40b3-b204-1e26eae4700d" (UID: "da3a835e-f782-40b3-b204-1e26eae4700d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.085381 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.085420 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lmzz\" (UniqueName: \"kubernetes.io/projected/da3a835e-f782-40b3-b204-1e26eae4700d-kube-api-access-7lmzz\") on node \"crc\" DevicePath \"\"" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.085433 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da3a835e-f782-40b3-b204-1e26eae4700d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.505548 5081 generic.go:334] "Generic (PLEG): container finished" podID="da3a835e-f782-40b3-b204-1e26eae4700d" containerID="1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef" exitCode=0 Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.505637 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tplwf" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.505685 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerDied","Data":"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef"} Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.506501 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tplwf" event={"ID":"da3a835e-f782-40b3-b204-1e26eae4700d","Type":"ContainerDied","Data":"2b4facca4f8501ed3cb24d73853155a936bfdf565edbb8bdff2bafb52d585955"} Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.506591 5081 scope.go:117] "RemoveContainer" containerID="1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.529899 5081 scope.go:117] "RemoveContainer" containerID="d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.544712 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.554544 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tplwf"] Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.564310 5081 scope.go:117] "RemoveContainer" containerID="8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.581684 5081 scope.go:117] "RemoveContainer" containerID="1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef" Oct 03 16:31:49 crc kubenswrapper[5081]: E1003 16:31:49.582078 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef\": container with ID starting with 1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef not found: ID does not exist" containerID="1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.582108 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef"} err="failed to get container status \"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef\": rpc error: code = NotFound desc = could not find container \"1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef\": container with ID starting with 1958fbc523d19ff5897f81ff77309578eaf3ffe33232a9c1833864b687533eef not found: ID does not exist" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.582128 5081 scope.go:117] "RemoveContainer" containerID="d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44" Oct 03 16:31:49 crc kubenswrapper[5081]: E1003 16:31:49.582392 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44\": container with ID starting with d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44 not found: ID does not exist" containerID="d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.582416 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44"} err="failed to get container status \"d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44\": rpc error: code = NotFound desc = could not find container \"d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44\": container with ID starting with d2b49c265091f0ed59f6d128dfa050abc25c410fa0e227929609bef00fc37c44 not found: ID does not exist" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.582429 5081 scope.go:117] "RemoveContainer" containerID="8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df" Oct 03 16:31:49 crc kubenswrapper[5081]: E1003 16:31:49.582767 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df\": container with ID starting with 8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df not found: ID does not exist" containerID="8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.582810 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df"} err="failed to get container status \"8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df\": rpc error: code = NotFound desc = could not find container \"8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df\": container with ID starting with 8baff5b08288ce9d76c27549480d11609d111062780245ac06c28d8d546b79df not found: ID does not exist" Oct 03 16:31:49 crc kubenswrapper[5081]: I1003 16:31:49.835730 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" path="/var/lib/kubelet/pods/da3a835e-f782-40b3-b204-1e26eae4700d/volumes" Oct 03 16:32:02 crc kubenswrapper[5081]: I1003 16:32:02.828137 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:32:02 crc kubenswrapper[5081]: E1003 16:32:02.829233 5081 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:32:17 crc kubenswrapper[5081]: I1003 16:32:17.827876 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:32:17 crc kubenswrapper[5081]: E1003 16:32:17.828593 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:32:28 crc kubenswrapper[5081]: I1003 16:32:28.827383 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:32:28 crc kubenswrapper[5081]: E1003 16:32:28.828218 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:32:43 crc kubenswrapper[5081]: I1003 16:32:43.827125 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:32:43 crc kubenswrapper[5081]: E1003 16:32:43.827839 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:32:56 crc kubenswrapper[5081]: I1003 16:32:56.827583 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:32:56 crc kubenswrapper[5081]: E1003 16:32:56.828325 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:33:10 crc kubenswrapper[5081]: I1003 16:33:10.827494 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:33:10 crc kubenswrapper[5081]: E1003 16:33:10.828312 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:33:21 crc kubenswrapper[5081]: I1003 16:33:21.832096 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:33:21 crc kubenswrapper[5081]: E1003 16:33:21.832939 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:33:35 crc kubenswrapper[5081]: I1003 16:33:35.828011 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:33:36 crc kubenswrapper[5081]: I1003 16:33:36.263774 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd"} Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.889020 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:33 crc kubenswrapper[5081]: E1003 16:35:33.889838 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="extract-utilities" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.889853 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="extract-utilities" Oct 03 16:35:33 crc kubenswrapper[5081]: E1003 16:35:33.889889 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="registry-server" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.889898 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="registry-server" Oct 03 16:35:33 crc kubenswrapper[5081]: E1003 16:35:33.889919 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="extract-content" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.889926 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="extract-content" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.890263 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="da3a835e-f782-40b3-b204-1e26eae4700d" containerName="registry-server" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.891387 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:33 crc kubenswrapper[5081]: I1003 16:35:33.902253 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.010832 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.010902 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcz87\" (UniqueName: \"kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.010939 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.112273 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.112333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcz87\" (UniqueName: \"kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.112363 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.112892 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.112939 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.135666 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dcz87\" (UniqueName: \"kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87\") pod \"community-operators-db7hc\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.210488 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:34 crc kubenswrapper[5081]: I1003 16:35:34.678201 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:35 crc kubenswrapper[5081]: I1003 16:35:35.133237 5081 generic.go:334] "Generic (PLEG): container finished" podID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerID="3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0" exitCode=0 Oct 03 16:35:35 crc kubenswrapper[5081]: I1003 16:35:35.133313 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerDied","Data":"3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0"} Oct 03 16:35:35 crc kubenswrapper[5081]: I1003 16:35:35.133638 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerStarted","Data":"cab7417085c56bb94d6d6c3d42d9d00515f0aa53f1fb715cafbf9f80ea00b223"} Oct 03 16:35:35 crc kubenswrapper[5081]: I1003 16:35:35.135151 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:35:36 crc kubenswrapper[5081]: I1003 16:35:36.140443 5081 generic.go:334] "Generic (PLEG): container finished" podID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerID="79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296" exitCode=0 Oct 03 16:35:36 crc kubenswrapper[5081]: I1003 16:35:36.140546 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerDied","Data":"79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296"} Oct 03 16:35:37 crc kubenswrapper[5081]: I1003 16:35:37.150493 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerStarted","Data":"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2"} Oct 03 16:35:37 crc kubenswrapper[5081]: I1003 16:35:37.175970 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-db7hc" podStartSLOduration=2.551586898 podStartE2EDuration="4.175952406s" podCreationTimestamp="2025-10-03 16:35:33 +0000 UTC" firstStartedPulling="2025-10-03 16:35:35.13489225 +0000 UTC m=+4054.100448863" lastFinishedPulling="2025-10-03 16:35:36.759257768 +0000 UTC m=+4055.724814371" observedRunningTime="2025-10-03 16:35:37.16767625 +0000 UTC m=+4056.133232863" watchObservedRunningTime="2025-10-03 16:35:37.175952406 +0000 UTC m=+4056.141509019" Oct 03 16:35:44 crc kubenswrapper[5081]: I1003 16:35:44.210905 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:44 crc kubenswrapper[5081]: I1003 16:35:44.211670 5081 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:44 crc kubenswrapper[5081]: I1003 16:35:44.631626 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:45 crc kubenswrapper[5081]: I1003 16:35:45.238949 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:45 crc kubenswrapper[5081]: I1003 16:35:45.278348 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.217841 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-db7hc" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="registry-server" containerID="cri-o://851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2" gracePeriod=2 Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.673793 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.823328 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content\") pod \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.823445 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities\") pod \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.823545 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcz87\" (UniqueName: \"kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87\") pod \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\" (UID: \"22aa209f-c00a-43ad-8f9e-e7b7742160ea\") " Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.824409 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities" (OuterVolumeSpecName: "utilities") pod "22aa209f-c00a-43ad-8f9e-e7b7742160ea" (UID: "22aa209f-c00a-43ad-8f9e-e7b7742160ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.828535 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87" (OuterVolumeSpecName: "kube-api-access-dcz87") pod "22aa209f-c00a-43ad-8f9e-e7b7742160ea" (UID: "22aa209f-c00a-43ad-8f9e-e7b7742160ea"). InnerVolumeSpecName "kube-api-access-dcz87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.872122 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22aa209f-c00a-43ad-8f9e-e7b7742160ea" (UID: "22aa209f-c00a-43ad-8f9e-e7b7742160ea"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.925659 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.925686 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22aa209f-c00a-43ad-8f9e-e7b7742160ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:35:47 crc kubenswrapper[5081]: I1003 16:35:47.925698 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcz87\" (UniqueName: \"kubernetes.io/projected/22aa209f-c00a-43ad-8f9e-e7b7742160ea-kube-api-access-dcz87\") on node \"crc\" DevicePath \"\"" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.225134 5081 generic.go:334] "Generic (PLEG): container finished" podID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerID="851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2" exitCode=0 Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.225172 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerDied","Data":"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2"} Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.225193 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-db7hc" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.225203 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db7hc" event={"ID":"22aa209f-c00a-43ad-8f9e-e7b7742160ea","Type":"ContainerDied","Data":"cab7417085c56bb94d6d6c3d42d9d00515f0aa53f1fb715cafbf9f80ea00b223"} Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.225220 5081 scope.go:117] "RemoveContainer" containerID="851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.258191 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.262717 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-db7hc"] Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.269387 5081 scope.go:117] "RemoveContainer" containerID="79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.287573 5081 scope.go:117] "RemoveContainer" containerID="3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.313353 5081 scope.go:117] "RemoveContainer" containerID="851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2" Oct 03 16:35:48 crc kubenswrapper[5081]: E1003 16:35:48.314343 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2\": container with ID starting with 851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2 not found: ID does not exist" containerID="851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2" Oct 03 16:35:48 crc 
kubenswrapper[5081]: I1003 16:35:48.314376 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2"} err="failed to get container status \"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2\": rpc error: code = NotFound desc = could not find container \"851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2\": container with ID starting with 851da73194c50d4efb870094a9a01ed72e6ec8b6ff1a6e6a840f9b92923981c2 not found: ID does not exist" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.314412 5081 scope.go:117] "RemoveContainer" containerID="79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296" Oct 03 16:35:48 crc kubenswrapper[5081]: E1003 16:35:48.314800 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296\": container with ID starting with 79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296 not found: ID does not exist" containerID="79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.314854 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296"} err="failed to get container status \"79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296\": rpc error: code = NotFound desc = could not find container \"79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296\": container with ID starting with 79b0233a8b6541748fae9d7ba405581acbab69af2c7ecb1348f5167e62848296 not found: ID does not exist" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.314890 5081 scope.go:117] "RemoveContainer" containerID="3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0" Oct 03 16:35:48 crc kubenswrapper[5081]: E1003 16:35:48.315172 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0\": container with ID starting with 3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0 not found: ID does not exist" containerID="3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0" Oct 03 16:35:48 crc kubenswrapper[5081]: I1003 16:35:48.315193 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0"} err="failed to get container status \"3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0\": rpc error: code = NotFound desc = could not find container \"3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0\": container with ID starting with 3846a2ada7feac8d84a5a8ccdce365822db9985774ef32687db53979d7da2ea0 not found: ID does not exist" Oct 03 16:35:49 crc kubenswrapper[5081]: I1003 16:35:49.839267 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" path="/var/lib/kubelet/pods/22aa209f-c00a-43ad-8f9e-e7b7742160ea/volumes" Oct 03 16:36:00 crc kubenswrapper[5081]: I1003 16:36:00.647114 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:36:00 crc kubenswrapper[5081]: I1003 16:36:00.647718 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:36:30 crc kubenswrapper[5081]: I1003 16:36:30.647758 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:36:30 crc kubenswrapper[5081]: I1003 16:36:30.648733 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:37:00 crc kubenswrapper[5081]: I1003 16:37:00.648080 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:37:00 crc kubenswrapper[5081]: I1003 16:37:00.648733 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:37:00 crc kubenswrapper[5081]: I1003 16:37:00.648785 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:37:00 crc kubenswrapper[5081]: I1003 16:37:00.649549 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:37:00 crc kubenswrapper[5081]: I1003 16:37:00.649645 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd" gracePeriod=600 Oct 03 16:37:01 crc kubenswrapper[5081]: I1003 16:37:01.730051 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd" exitCode=0 Oct 03 16:37:01 crc kubenswrapper[5081]: I1003 16:37:01.730118 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd"} Oct 03 16:37:01 crc kubenswrapper[5081]: I1003 16:37:01.730686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"} Oct 03 16:37:01 crc kubenswrapper[5081]: I1003 16:37:01.730717 5081 scope.go:117] "RemoveContainer" containerID="ce32e6df372614d10f63b3b6465aefcab2347ba0766d5c0a504fcc78bd7ac5f6" Oct 03 16:39:00 crc kubenswrapper[5081]: I1003 16:39:00.647602 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:39:00 crc kubenswrapper[5081]: I1003 16:39:00.648219 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:39:30 crc kubenswrapper[5081]: I1003 16:39:30.647986 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:39:30 crc kubenswrapper[5081]: I1003 16:39:30.648794 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:40:00 crc kubenswrapper[5081]: I1003 16:40:00.647811 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:40:00 crc kubenswrapper[5081]: I1003 16:40:00.648439 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:40:00 crc kubenswrapper[5081]: I1003 16:40:00.648492 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:40:00 crc kubenswrapper[5081]: I1003 16:40:00.649671 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 
16:40:00 crc kubenswrapper[5081]: I1003 16:40:00.649968 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" gracePeriod=600 Oct 03 16:40:00 crc kubenswrapper[5081]: E1003 16:40:00.780522 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:40:01 crc kubenswrapper[5081]: I1003 16:40:01.028927 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" exitCode=0 Oct 03 16:40:01 crc kubenswrapper[5081]: I1003 16:40:01.029034 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"} Oct 03 16:40:01 crc kubenswrapper[5081]: I1003 16:40:01.029517 5081 scope.go:117] "RemoveContainer" containerID="b70279e791bfbe86746508ae7855dd30f11246a0be0820c48470a16988c251dd" Oct 03 16:40:01 crc kubenswrapper[5081]: I1003 16:40:01.031162 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:40:01 crc kubenswrapper[5081]: E1003 16:40:01.032055 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:40:15 crc kubenswrapper[5081]: I1003 16:40:15.827489 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:40:15 crc kubenswrapper[5081]: E1003 16:40:15.828926 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:40:27 crc kubenswrapper[5081]: I1003 16:40:27.827551 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:40:27 crc kubenswrapper[5081]: E1003 16:40:27.828400 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:40:42 crc kubenswrapper[5081]: I1003 16:40:42.828750 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:40:42 crc kubenswrapper[5081]: E1003 16:40:42.830287 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:40:57 crc kubenswrapper[5081]: I1003 16:40:57.827736 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:40:57 crc kubenswrapper[5081]: E1003 16:40:57.828517 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:41:10 crc kubenswrapper[5081]: I1003 16:41:10.827051 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:41:10 crc kubenswrapper[5081]: E1003 16:41:10.827825 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:41:24 crc kubenswrapper[5081]: I1003 16:41:24.827156 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:41:24 crc kubenswrapper[5081]: E1003 16:41:24.827966 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:41:38 crc kubenswrapper[5081]: I1003 16:41:38.827111 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:41:38 crc kubenswrapper[5081]: E1003 16:41:38.828837 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:41:52 crc kubenswrapper[5081]: I1003 16:41:52.828456 5081 
scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:41:52 crc kubenswrapper[5081]: E1003 16:41:52.829671 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.827045 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:00 crc kubenswrapper[5081]: E1003 16:42:00.827950 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="registry-server" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.827963 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="registry-server" Oct 03 16:42:00 crc kubenswrapper[5081]: E1003 16:42:00.827981 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="extract-utilities" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.827988 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="extract-utilities" Oct 03 16:42:00 crc kubenswrapper[5081]: E1003 16:42:00.828000 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="extract-content" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.828006 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="extract-content" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.828179 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="22aa209f-c00a-43ad-8f9e-e7b7742160ea" containerName="registry-server" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.829315 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.849145 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.863862 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnc97\" (UniqueName: \"kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.863917 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.863963 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.965289 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnc97\" (UniqueName: \"kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.965343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.965384 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.966189 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.966227 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:00 crc kubenswrapper[5081]: I1003 16:42:00.986702 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vnc97\" (UniqueName: \"kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97\") pod \"redhat-operators-chdc2\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.149288 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.628174 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.902595 5081 generic.go:334] "Generic (PLEG): container finished" podID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerID="8ad9ddf386f38106198ec0ebb635fb36d98a1f8caec84003f14ae65faa87c96e" exitCode=0 Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.902848 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerDied","Data":"8ad9ddf386f38106198ec0ebb635fb36d98a1f8caec84003f14ae65faa87c96e"} Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.902955 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerStarted","Data":"56a317f9f51334f73e09efdb48c5614f2886a0dace2302a6e9190a9fbc621114"} Oct 03 16:42:01 crc kubenswrapper[5081]: I1003 16:42:01.904336 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 16:42:03 crc kubenswrapper[5081]: I1003 16:42:03.923828 5081 generic.go:334] "Generic (PLEG): container finished" podID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerID="017628f30c2ba3eb1e48087df7e3790f1ffcdbc6a02760fe9de0445b97a87a97" exitCode=0 Oct 03 16:42:03 crc kubenswrapper[5081]: I1003 16:42:03.924022 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerDied","Data":"017628f30c2ba3eb1e48087df7e3790f1ffcdbc6a02760fe9de0445b97a87a97"} Oct 03 16:42:04 crc kubenswrapper[5081]: I1003 16:42:04.934990 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerStarted","Data":"9c25b164b75878ca177c6d6ce80c4e5f8fd4a1630631d55a402e079524ef730a"} Oct 03 16:42:04 crc kubenswrapper[5081]: I1003 16:42:04.963113 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-chdc2" podStartSLOduration=2.271791944 podStartE2EDuration="4.963086801s" podCreationTimestamp="2025-10-03 16:42:00 +0000 UTC" firstStartedPulling="2025-10-03 16:42:01.904055174 +0000 UTC m=+4440.869611787" lastFinishedPulling="2025-10-03 16:42:04.595350031 +0000 UTC m=+4443.560906644" observedRunningTime="2025-10-03 16:42:04.95813914 +0000 UTC m=+4443.923695773" watchObservedRunningTime="2025-10-03 16:42:04.963086801 +0000 UTC m=+4443.928643434" Oct 03 16:42:05 crc kubenswrapper[5081]: I1003 16:42:05.827825 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:42:05 crc kubenswrapper[5081]: E1003 16:42:05.828123 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:42:11 crc kubenswrapper[5081]: I1003 16:42:11.150247 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:11 crc kubenswrapper[5081]: I1003 16:42:11.151783 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:11 crc kubenswrapper[5081]: I1003 16:42:11.192469 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:12 crc kubenswrapper[5081]: I1003 16:42:12.018185 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:12 crc kubenswrapper[5081]: I1003 16:42:12.071342 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:13 crc kubenswrapper[5081]: I1003 16:42:13.990886 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-chdc2" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="registry-server" containerID="cri-o://9c25b164b75878ca177c6d6ce80c4e5f8fd4a1630631d55a402e079524ef730a" gracePeriod=2 Oct 03 16:42:14 crc kubenswrapper[5081]: I1003 16:42:14.999052 5081 generic.go:334] "Generic (PLEG): container finished" podID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerID="9c25b164b75878ca177c6d6ce80c4e5f8fd4a1630631d55a402e079524ef730a" exitCode=0 Oct 03 16:42:14 crc kubenswrapper[5081]: I1003 16:42:14.999104 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerDied","Data":"9c25b164b75878ca177c6d6ce80c4e5f8fd4a1630631d55a402e079524ef730a"} Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.560046 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.614893 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content\") pod \"989ffdd1-2190-41dc-aa7d-163094a715eb\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.614966 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities\") pod \"989ffdd1-2190-41dc-aa7d-163094a715eb\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.615129 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnc97\" (UniqueName: \"kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97\") pod \"989ffdd1-2190-41dc-aa7d-163094a715eb\" (UID: \"989ffdd1-2190-41dc-aa7d-163094a715eb\") " Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.616178 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities" (OuterVolumeSpecName: "utilities") pod "989ffdd1-2190-41dc-aa7d-163094a715eb" (UID: "989ffdd1-2190-41dc-aa7d-163094a715eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.621274 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97" (OuterVolumeSpecName: "kube-api-access-vnc97") pod "989ffdd1-2190-41dc-aa7d-163094a715eb" (UID: "989ffdd1-2190-41dc-aa7d-163094a715eb"). InnerVolumeSpecName "kube-api-access-vnc97". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.698037 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "989ffdd1-2190-41dc-aa7d-163094a715eb" (UID: "989ffdd1-2190-41dc-aa7d-163094a715eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.716211 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.716246 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/989ffdd1-2190-41dc-aa7d-163094a715eb-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:42:15 crc kubenswrapper[5081]: I1003 16:42:15.716259 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnc97\" (UniqueName: \"kubernetes.io/projected/989ffdd1-2190-41dc-aa7d-163094a715eb-kube-api-access-vnc97\") on node \"crc\" DevicePath \"\"" Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.006601 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chdc2" event={"ID":"989ffdd1-2190-41dc-aa7d-163094a715eb","Type":"ContainerDied","Data":"56a317f9f51334f73e09efdb48c5614f2886a0dace2302a6e9190a9fbc621114"} Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.006714 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chdc2" Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.006892 5081 scope.go:117] "RemoveContainer" containerID="9c25b164b75878ca177c6d6ce80c4e5f8fd4a1630631d55a402e079524ef730a" Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.025448 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.030833 5081 scope.go:117] "RemoveContainer" containerID="017628f30c2ba3eb1e48087df7e3790f1ffcdbc6a02760fe9de0445b97a87a97" Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.034181 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-chdc2"] Oct 03 16:42:16 crc kubenswrapper[5081]: I1003 16:42:16.049426 5081 scope.go:117] "RemoveContainer" containerID="8ad9ddf386f38106198ec0ebb635fb36d98a1f8caec84003f14ae65faa87c96e" Oct 03 16:42:17 crc kubenswrapper[5081]: I1003 16:42:17.836459 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" path="/var/lib/kubelet/pods/989ffdd1-2190-41dc-aa7d-163094a715eb/volumes" Oct 03 16:42:20 crc kubenswrapper[5081]: I1003 16:42:20.828069 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:42:20 crc kubenswrapper[5081]: E1003 16:42:20.828689 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.951924 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"] Oct 03 16:42:22 crc kubenswrapper[5081]: E1003 16:42:22.952238 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="registry-server" 
Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.952254 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="registry-server"
Oct 03 16:42:22 crc kubenswrapper[5081]: E1003 16:42:22.952276 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="extract-utilities"
Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.952284 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="extract-utilities"
Oct 03 16:42:22 crc kubenswrapper[5081]: E1003 16:42:22.952311 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="extract-content"
Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.952321 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="extract-content"
Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.952492 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="989ffdd1-2190-41dc-aa7d-163094a715eb" containerName="registry-server"
Oct 03 16:42:22 crc kubenswrapper[5081]: I1003 16:42:22.953546 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.010346 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"]
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.112033 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.112148 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.112173 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phjzg\" (UniqueName: \"kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.213324 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.213481 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.213515 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phjzg\" (UniqueName: \"kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.213812 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.213910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.236471 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phjzg\" (UniqueName: \"kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg\") pod \"redhat-marketplace-mlwdc\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") " pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.275080 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:23 crc kubenswrapper[5081]: I1003 16:42:23.679072 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"]
Oct 03 16:42:24 crc kubenswrapper[5081]: I1003 16:42:24.060131 5081 generic.go:334] "Generic (PLEG): container finished" podID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerID="82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605" exitCode=0
Oct 03 16:42:24 crc kubenswrapper[5081]: I1003 16:42:24.061274 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerDied","Data":"82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605"}
Oct 03 16:42:24 crc kubenswrapper[5081]: I1003 16:42:24.061431 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerStarted","Data":"4d93a2db2727960a841fd1c387c95d9e3e7ce9a6d5910769c48dc3ef09b8aef6"}
Oct 03 16:42:25 crc kubenswrapper[5081]: I1003 16:42:25.071062 5081 generic.go:334] "Generic (PLEG): container finished" podID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerID="fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293" exitCode=0
Oct 03 16:42:25 crc kubenswrapper[5081]: I1003 16:42:25.071145 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerDied","Data":"fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293"}
Oct 03 16:42:26 crc kubenswrapper[5081]: I1003 16:42:26.084435 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerStarted","Data":"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"}
Oct 03 16:42:26 crc kubenswrapper[5081]: I1003 16:42:26.108501 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mlwdc" podStartSLOduration=2.633468558 podStartE2EDuration="4.108482745s" podCreationTimestamp="2025-10-03 16:42:22 +0000 UTC" firstStartedPulling="2025-10-03 16:42:24.062080492 +0000 UTC m=+4463.027637105" lastFinishedPulling="2025-10-03 16:42:25.537094679 +0000 UTC m=+4464.502651292" observedRunningTime="2025-10-03 16:42:26.106766556 +0000 UTC m=+4465.072323179" watchObservedRunningTime="2025-10-03 16:42:26.108482745 +0000 UTC m=+4465.074039358"
Oct 03 16:42:32 crc kubenswrapper[5081]: I1003 16:42:32.827652 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:42:32 crc kubenswrapper[5081]: E1003 16:42:32.829227 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:42:33 crc kubenswrapper[5081]: I1003 16:42:33.276124 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:33 crc kubenswrapper[5081]: I1003 16:42:33.276178 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:33 crc kubenswrapper[5081]: I1003 16:42:33.329328 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:34 crc kubenswrapper[5081]: I1003 16:42:34.188731 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:34 crc kubenswrapper[5081]: I1003 16:42:34.229000 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"]
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.161629 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mlwdc" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="registry-server" containerID="cri-o://3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d" gracePeriod=2
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.558300 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.709782 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phjzg\" (UniqueName: \"kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg\") pod \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") "
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.709848 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities\") pod \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") "
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.709876 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content\") pod \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\" (UID: \"583c83ef-0bb8-4f9b-ab5c-6c6da8617029\") "
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.710806 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities" (OuterVolumeSpecName: "utilities") pod "583c83ef-0bb8-4f9b-ab5c-6c6da8617029" (UID: "583c83ef-0bb8-4f9b-ab5c-6c6da8617029"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.716853 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg" (OuterVolumeSpecName: "kube-api-access-phjzg") pod "583c83ef-0bb8-4f9b-ab5c-6c6da8617029" (UID: "583c83ef-0bb8-4f9b-ab5c-6c6da8617029"). InnerVolumeSpecName "kube-api-access-phjzg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.722757 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "583c83ef-0bb8-4f9b-ab5c-6c6da8617029" (UID: "583c83ef-0bb8-4f9b-ab5c-6c6da8617029"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.811814 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phjzg\" (UniqueName: \"kubernetes.io/projected/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-kube-api-access-phjzg\") on node \"crc\" DevicePath \"\""
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.811848 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 16:42:36 crc kubenswrapper[5081]: I1003 16:42:36.811857 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/583c83ef-0bb8-4f9b-ab5c-6c6da8617029-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.170229 5081 generic.go:334] "Generic (PLEG): container finished" podID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerID="3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d" exitCode=0
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.170341 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlwdc"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.170309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerDied","Data":"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"}
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.171288 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlwdc" event={"ID":"583c83ef-0bb8-4f9b-ab5c-6c6da8617029","Type":"ContainerDied","Data":"4d93a2db2727960a841fd1c387c95d9e3e7ce9a6d5910769c48dc3ef09b8aef6"}
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.171316 5081 scope.go:117] "RemoveContainer" containerID="3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.197346 5081 scope.go:117] "RemoveContainer" containerID="fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.199472 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"]
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.206407 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlwdc"]
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.231055 5081 scope.go:117] "RemoveContainer" containerID="82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.247642 5081 scope.go:117] "RemoveContainer" containerID="3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"
Oct 03 16:42:37 crc kubenswrapper[5081]: E1003 16:42:37.248127 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d\": container with ID starting with 3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d not found: ID does not exist" containerID="3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.248172 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d"} err="failed to get container status \"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d\": rpc error: code = NotFound desc = could not find container \"3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d\": container with ID starting with 3eb4e1933f767d4e8ceb772acd96a28656976004fdf2ed9011d277647797798d not found: ID does not exist"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.248205 5081 scope.go:117] "RemoveContainer" containerID="fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293"
Oct 03 16:42:37 crc kubenswrapper[5081]: E1003 16:42:37.248538 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293\": container with ID starting with fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293 not found: ID does not exist" containerID="fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.248712 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293"} err="failed to get container status \"fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293\": rpc error: code = NotFound desc = could not find container \"fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293\": container with ID starting with fc52e533b838fa3252646ede5d4204d794f75cb3ed7a1a671f180448832ce293 not found: ID does not exist"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.248799 5081 scope.go:117] "RemoveContainer" containerID="82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605"
Oct 03 16:42:37 crc kubenswrapper[5081]: E1003 16:42:37.249211 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605\": container with ID starting with 82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605 not found: ID does not exist" containerID="82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.249265 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605"} err="failed to get container status \"82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605\": rpc error: code = NotFound desc = could not find container \"82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605\": container with ID starting with 82dac4998a695971700411525ff6202f4d5c28edf330330faf15bf573d8b3605 not found: ID does not exist"
Oct 03 16:42:37 crc kubenswrapper[5081]: I1003 16:42:37.836747 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" path="/var/lib/kubelet/pods/583c83ef-0bb8-4f9b-ab5c-6c6da8617029/volumes"
Oct 03 16:42:44 crc kubenswrapper[5081]: I1003 16:42:44.827393 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:42:44 crc kubenswrapper[5081]: E1003 16:42:44.829164 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:42:56 crc kubenswrapper[5081]: I1003 16:42:56.827770 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:42:56 crc kubenswrapper[5081]: E1003 16:42:56.828628 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:43:11 crc kubenswrapper[5081]: I1003 16:43:11.831978 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:43:11 crc kubenswrapper[5081]: E1003 16:43:11.832679 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:43:25 crc kubenswrapper[5081]: I1003 16:43:25.828458 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:43:25 crc kubenswrapper[5081]: E1003 16:43:25.831236 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:43:38 crc kubenswrapper[5081]: I1003 16:43:38.827744 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:43:38 crc kubenswrapper[5081]: E1003 16:43:38.828511 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:43:51 crc kubenswrapper[5081]: I1003 16:43:51.831752 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:43:51 crc kubenswrapper[5081]: E1003 16:43:51.833667 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:44:02 crc kubenswrapper[5081]: I1003 16:44:02.827257 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:44:02 crc kubenswrapper[5081]: E1003 16:44:02.828069 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.268257 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-n8c8h"]
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.274886 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-n8c8h"]
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.425358 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5tkz4"]
Oct 03 16:44:08 crc kubenswrapper[5081]: E1003 16:44:08.425684 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="registry-server"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.425702 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="registry-server"
Oct 03 16:44:08 crc kubenswrapper[5081]: E1003 16:44:08.425720 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="extract-content"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.425726 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="extract-content"
Oct 03 16:44:08 crc kubenswrapper[5081]: E1003 16:44:08.425735 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="extract-utilities"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.425742 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="extract-utilities"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.425905 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="583c83ef-0bb8-4f9b-ab5c-6c6da8617029" containerName="registry-server"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.426548 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.428337 5081 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-vg5g2"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.428368 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.428886 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.430099 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.441055 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5tkz4"]
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.537766 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.537904 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rntx6\" (UniqueName: \"kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.537970 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.639916 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rntx6\" (UniqueName: \"kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.640006 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.640047 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.640350 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.640831 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.661894 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rntx6\" (UniqueName: \"kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6\") pod \"crc-storage-crc-5tkz4\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") " pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.745970 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:08 crc kubenswrapper[5081]: I1003 16:44:08.978214 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5tkz4"]
Oct 03 16:44:09 crc kubenswrapper[5081]: I1003 16:44:09.838327 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b" path="/var/lib/kubelet/pods/2bc2f14a-f35f-49cd-9c2a-c581ae3deb7b/volumes"
Oct 03 16:44:09 crc kubenswrapper[5081]: I1003 16:44:09.862490 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5tkz4" event={"ID":"fd5f53ec-d90a-483f-ac8e-5171e84656c8","Type":"ContainerStarted","Data":"b037ef1a4f940e1519d246002603160f20cef5601749bc02a6c60cd5e2b11eca"}
Oct 03 16:44:10 crc kubenswrapper[5081]: I1003 16:44:10.870462 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd5f53ec-d90a-483f-ac8e-5171e84656c8" containerID="1ad392a8baeb0eaf84cc391e0ccee1a6bada9c8f709f7d2c12309dc5d9876832" exitCode=0
Oct 03 16:44:10 crc kubenswrapper[5081]: I1003 16:44:10.870842 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5tkz4" event={"ID":"fd5f53ec-d90a-483f-ac8e-5171e84656c8","Type":"ContainerDied","Data":"1ad392a8baeb0eaf84cc391e0ccee1a6bada9c8f709f7d2c12309dc5d9876832"}
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.145127 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.295392 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage\") pod \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") "
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.295465 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt\") pod \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") "
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.295513 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rntx6\" (UniqueName: \"kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6\") pod \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\" (UID: \"fd5f53ec-d90a-483f-ac8e-5171e84656c8\") "
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.295830 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "fd5f53ec-d90a-483f-ac8e-5171e84656c8" (UID: "fd5f53ec-d90a-483f-ac8e-5171e84656c8"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.296348 5081 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fd5f53ec-d90a-483f-ac8e-5171e84656c8-node-mnt\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.300696 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6" (OuterVolumeSpecName: "kube-api-access-rntx6") pod "fd5f53ec-d90a-483f-ac8e-5171e84656c8" (UID: "fd5f53ec-d90a-483f-ac8e-5171e84656c8"). InnerVolumeSpecName "kube-api-access-rntx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.313145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "fd5f53ec-d90a-483f-ac8e-5171e84656c8" (UID: "fd5f53ec-d90a-483f-ac8e-5171e84656c8"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.398078 5081 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fd5f53ec-d90a-483f-ac8e-5171e84656c8-crc-storage\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.398397 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rntx6\" (UniqueName: \"kubernetes.io/projected/fd5f53ec-d90a-483f-ac8e-5171e84656c8-kube-api-access-rntx6\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.885719 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5tkz4" event={"ID":"fd5f53ec-d90a-483f-ac8e-5171e84656c8","Type":"ContainerDied","Data":"b037ef1a4f940e1519d246002603160f20cef5601749bc02a6c60cd5e2b11eca"}
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.885775 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5tkz4"
Oct 03 16:44:12 crc kubenswrapper[5081]: I1003 16:44:12.885797 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b037ef1a4f940e1519d246002603160f20cef5601749bc02a6c60cd5e2b11eca"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.379167 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5tkz4"]
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.385229 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5tkz4"]
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.507733 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-jd9pp"]
Oct 03 16:44:14 crc kubenswrapper[5081]: E1003 16:44:14.508058 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5f53ec-d90a-483f-ac8e-5171e84656c8" containerName="storage"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.508077 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5f53ec-d90a-483f-ac8e-5171e84656c8" containerName="storage"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.508211 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5f53ec-d90a-483f-ac8e-5171e84656c8" containerName="storage"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.508696 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.516982 5081 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-vg5g2"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.518590 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.519030 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.521682 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jd9pp"]
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.529650 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.629598 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.629881 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.629985 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfnkx\" (UniqueName: \"kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.731211 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.731293 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfnkx\" (UniqueName: \"kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.731362 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.731658 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.732252 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.750039 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfnkx\" (UniqueName: \"kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx\") pod \"crc-storage-crc-jd9pp\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") " pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:14 crc kubenswrapper[5081]: I1003 16:44:14.838927 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:15 crc kubenswrapper[5081]: I1003 16:44:15.263897 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jd9pp"]
Oct 03 16:44:15 crc kubenswrapper[5081]: I1003 16:44:15.837854 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5f53ec-d90a-483f-ac8e-5171e84656c8" path="/var/lib/kubelet/pods/fd5f53ec-d90a-483f-ac8e-5171e84656c8/volumes"
Oct 03 16:44:15 crc kubenswrapper[5081]: I1003 16:44:15.908549 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jd9pp" event={"ID":"5771ac93-3093-4145-a418-68e9b998c5d8","Type":"ContainerStarted","Data":"f60981a2247ba932e26801a0fe64ffe726dc3bd929194af094c2e8ce06c0c86e"}
Oct 03 16:44:16 crc kubenswrapper[5081]: I1003 16:44:16.920020 5081 generic.go:334] "Generic (PLEG): container finished" podID="5771ac93-3093-4145-a418-68e9b998c5d8" containerID="21e18de452a1053a3fc5c2471a28a4b8e8d70bfbc58934ca45a19100b21f4bad" exitCode=0
Oct 03 16:44:16 crc kubenswrapper[5081]: I1003 16:44:16.920072 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jd9pp" event={"ID":"5771ac93-3093-4145-a418-68e9b998c5d8","Type":"ContainerDied","Data":"21e18de452a1053a3fc5c2471a28a4b8e8d70bfbc58934ca45a19100b21f4bad"}
Oct 03 16:44:17 crc kubenswrapper[5081]: I1003 16:44:17.828085 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:44:17 crc kubenswrapper[5081]: E1003 16:44:17.828393 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.203057 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.287000 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage\") pod \"5771ac93-3093-4145-a418-68e9b998c5d8\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") "
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.287162 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfnkx\" (UniqueName: \"kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx\") pod \"5771ac93-3093-4145-a418-68e9b998c5d8\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") "
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.287332 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt\") pod \"5771ac93-3093-4145-a418-68e9b998c5d8\" (UID: \"5771ac93-3093-4145-a418-68e9b998c5d8\") "
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.287552 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "5771ac93-3093-4145-a418-68e9b998c5d8" (UID: "5771ac93-3093-4145-a418-68e9b998c5d8"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.287800 5081 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5771ac93-3093-4145-a418-68e9b998c5d8-node-mnt\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.293788 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx" (OuterVolumeSpecName: "kube-api-access-qfnkx") pod "5771ac93-3093-4145-a418-68e9b998c5d8" (UID: "5771ac93-3093-4145-a418-68e9b998c5d8"). InnerVolumeSpecName "kube-api-access-qfnkx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.330478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "5771ac93-3093-4145-a418-68e9b998c5d8" (UID: "5771ac93-3093-4145-a418-68e9b998c5d8"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.389214 5081 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5771ac93-3093-4145-a418-68e9b998c5d8-crc-storage\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.389255 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfnkx\" (UniqueName: \"kubernetes.io/projected/5771ac93-3093-4145-a418-68e9b998c5d8-kube-api-access-qfnkx\") on node \"crc\" DevicePath \"\""
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.939204 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jd9pp" event={"ID":"5771ac93-3093-4145-a418-68e9b998c5d8","Type":"ContainerDied","Data":"f60981a2247ba932e26801a0fe64ffe726dc3bd929194af094c2e8ce06c0c86e"}
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.939301 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f60981a2247ba932e26801a0fe64ffe726dc3bd929194af094c2e8ce06c0c86e"
Oct 03 16:44:18 crc kubenswrapper[5081]: I1003 16:44:18.939246 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jd9pp"
Oct 03 16:44:22 crc kubenswrapper[5081]: I1003 16:44:22.039669 5081 scope.go:117] "RemoveContainer" containerID="0e2eccbc2cfbc6e67f183377abba5cb02ae7e1b821f3aa1d4ca9daabe2879129"
Oct 03 16:44:32 crc kubenswrapper[5081]: I1003 16:44:32.827680 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:44:32 crc kubenswrapper[5081]: E1003 16:44:32.828419 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:44:44 crc kubenswrapper[5081]: I1003 16:44:44.827469 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:44:44 crc kubenswrapper[5081]: E1003 16:44:44.828272 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:44:55 crc kubenswrapper[5081]: I1003 16:44:55.827919 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7"
Oct 03 16:44:55 crc kubenswrapper[5081]: E1003 16:44:55.828715 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.141812 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"]
Oct 03 16:45:00 crc kubenswrapper[5081]: E1003 16:45:00.142158 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5771ac93-3093-4145-a418-68e9b998c5d8" containerName="storage"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.142169 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5771ac93-3093-4145-a418-68e9b998c5d8" containerName="storage"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.142322 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5771ac93-3093-4145-a418-68e9b998c5d8" containerName="storage"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.142811 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.145659 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.147050 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.155402 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"]
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.202308 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnkcc\" (UniqueName: \"kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.202756 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.202806 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.304607 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnkcc\" (UniqueName: \"kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"
Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.304677 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume\") pod
\"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.304723 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.305901 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.317951 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.320822 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnkcc\" (UniqueName: \"kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc\") pod \"collect-profiles-29325165-j74hl\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.470632 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:00 crc kubenswrapper[5081]: I1003 16:45:00.881247 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"] Oct 03 16:45:01 crc kubenswrapper[5081]: I1003 16:45:01.234844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" event={"ID":"8cf86ba3-04ed-4956-9a95-90f23ef359a6","Type":"ContainerStarted","Data":"8ab05bba877dc43f626c8a2d601d7e71a37eef5979b4a36a712c3dcca43ce6a0"} Oct 03 16:45:01 crc kubenswrapper[5081]: I1003 16:45:01.234889 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" event={"ID":"8cf86ba3-04ed-4956-9a95-90f23ef359a6","Type":"ContainerStarted","Data":"5ab1037bcd487c12f751e59b87e5daab1f141c571bf6c54ce15bdde6addcb104"} Oct 03 16:45:02 crc kubenswrapper[5081]: I1003 16:45:02.252769 5081 generic.go:334] "Generic (PLEG): container finished" podID="8cf86ba3-04ed-4956-9a95-90f23ef359a6" containerID="8ab05bba877dc43f626c8a2d601d7e71a37eef5979b4a36a712c3dcca43ce6a0" exitCode=0 Oct 03 16:45:02 crc kubenswrapper[5081]: I1003 16:45:02.252927 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" event={"ID":"8cf86ba3-04ed-4956-9a95-90f23ef359a6","Type":"ContainerDied","Data":"8ab05bba877dc43f626c8a2d601d7e71a37eef5979b4a36a712c3dcca43ce6a0"} Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.512015 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.550623 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume\") pod \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.550691 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnkcc\" (UniqueName: \"kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc\") pod \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.550740 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume\") pod \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\" (UID: \"8cf86ba3-04ed-4956-9a95-90f23ef359a6\") " Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.552041 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume" (OuterVolumeSpecName: "config-volume") pod "8cf86ba3-04ed-4956-9a95-90f23ef359a6" (UID: "8cf86ba3-04ed-4956-9a95-90f23ef359a6"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.556790 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8cf86ba3-04ed-4956-9a95-90f23ef359a6" (UID: "8cf86ba3-04ed-4956-9a95-90f23ef359a6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.557739 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc" (OuterVolumeSpecName: "kube-api-access-mnkcc") pod "8cf86ba3-04ed-4956-9a95-90f23ef359a6" (UID: "8cf86ba3-04ed-4956-9a95-90f23ef359a6"). InnerVolumeSpecName "kube-api-access-mnkcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.653355 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8cf86ba3-04ed-4956-9a95-90f23ef359a6-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.653424 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnkcc\" (UniqueName: \"kubernetes.io/projected/8cf86ba3-04ed-4956-9a95-90f23ef359a6-kube-api-access-mnkcc\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:03 crc kubenswrapper[5081]: I1003 16:45:03.653437 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cf86ba3-04ed-4956-9a95-90f23ef359a6-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.271240 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:04 crc kubenswrapper[5081]: E1003 16:45:04.272092 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf86ba3-04ed-4956-9a95-90f23ef359a6" containerName="collect-profiles" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.272136 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf86ba3-04ed-4956-9a95-90f23ef359a6" containerName="collect-profiles" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.272423 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf86ba3-04ed-4956-9a95-90f23ef359a6" containerName="collect-profiles" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.274140 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.279594 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.300214 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" event={"ID":"8cf86ba3-04ed-4956-9a95-90f23ef359a6","Type":"ContainerDied","Data":"5ab1037bcd487c12f751e59b87e5daab1f141c571bf6c54ce15bdde6addcb104"} Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.300283 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ab1037bcd487c12f751e59b87e5daab1f141c571bf6c54ce15bdde6addcb104" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.300350 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.329208 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7"] Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.334167 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325120-qbhb7"] Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.365404 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.365494 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2srfm\" (UniqueName: \"kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.365579 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.466916 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2srfm\" (UniqueName: \"kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.467005 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.467081 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.467663 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.467805 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.488735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2srfm\" (UniqueName: \"kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm\") pod \"certified-operators-8lqdw\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:04 crc kubenswrapper[5081]: I1003 16:45:04.604855 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:05 crc kubenswrapper[5081]: I1003 16:45:05.111199 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:05 crc kubenswrapper[5081]: W1003 16:45:05.113369 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05fc8149_5d54_4490_8d58_e4dc2d85bf67.slice/crio-d3647b0f6eab82d1b99337a8b05f9a17483dc3821e3cb420c689fba2640ea104 WatchSource:0}: Error finding container d3647b0f6eab82d1b99337a8b05f9a17483dc3821e3cb420c689fba2640ea104: Status 404 returned error can't find the container with id d3647b0f6eab82d1b99337a8b05f9a17483dc3821e3cb420c689fba2640ea104 Oct 03 16:45:05 crc kubenswrapper[5081]: I1003 16:45:05.308183 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerDied","Data":"907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261"} Oct 03 16:45:05 crc kubenswrapper[5081]: I1003 16:45:05.309127 5081 generic.go:334] "Generic (PLEG): container finished" podID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerID="907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261" exitCode=0 Oct 03 16:45:05 crc kubenswrapper[5081]: I1003 16:45:05.309194 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerStarted","Data":"d3647b0f6eab82d1b99337a8b05f9a17483dc3821e3cb420c689fba2640ea104"} Oct 03 16:45:05 crc kubenswrapper[5081]: I1003 16:45:05.837810 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c2eea03-99be-4909-b9e9-a2e6dbbc28d3" path="/var/lib/kubelet/pods/5c2eea03-99be-4909-b9e9-a2e6dbbc28d3/volumes" 
Oct 03 16:45:07 crc kubenswrapper[5081]: I1003 16:45:07.324781 5081 generic.go:334] "Generic (PLEG): container finished" podID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerID="70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b" exitCode=0 Oct 03 16:45:07 crc kubenswrapper[5081]: I1003 16:45:07.324839 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerDied","Data":"70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b"} Oct 03 16:45:08 crc kubenswrapper[5081]: I1003 16:45:08.336534 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerStarted","Data":"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98"} Oct 03 16:45:08 crc kubenswrapper[5081]: I1003 16:45:08.357629 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8lqdw" podStartSLOduration=1.9057224050000001 podStartE2EDuration="4.357614627s" podCreationTimestamp="2025-10-03 16:45:04 +0000 UTC" firstStartedPulling="2025-10-03 16:45:05.309778089 +0000 UTC m=+4624.275334702" lastFinishedPulling="2025-10-03 16:45:07.761670311 +0000 UTC m=+4626.727226924" observedRunningTime="2025-10-03 16:45:08.354693533 +0000 UTC m=+4627.320250156" watchObservedRunningTime="2025-10-03 16:45:08.357614627 +0000 UTC m=+4627.323171240" Oct 03 16:45:09 crc kubenswrapper[5081]: I1003 16:45:09.828785 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:45:10 crc kubenswrapper[5081]: I1003 16:45:10.352037 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7"} Oct 03 16:45:14 crc kubenswrapper[5081]: I1003 16:45:14.605190 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:14 crc kubenswrapper[5081]: I1003 16:45:14.605739 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:14 crc kubenswrapper[5081]: I1003 16:45:14.646442 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:15 crc kubenswrapper[5081]: I1003 16:45:15.421151 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:15 crc kubenswrapper[5081]: I1003 16:45:15.470309 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:17 crc kubenswrapper[5081]: I1003 16:45:17.396371 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8lqdw" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="registry-server" containerID="cri-o://398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98" gracePeriod=2 Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.089978 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.156778 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content\") pod \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.157197 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2srfm\" (UniqueName: \"kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm\") pod \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.157786 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities\") pod \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\" (UID: \"05fc8149-5d54-4490-8d58-e4dc2d85bf67\") " Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.159145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities" (OuterVolumeSpecName: "utilities") pod "05fc8149-5d54-4490-8d58-e4dc2d85bf67" (UID: "05fc8149-5d54-4490-8d58-e4dc2d85bf67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.161071 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.164850 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm" (OuterVolumeSpecName: "kube-api-access-2srfm") pod "05fc8149-5d54-4490-8d58-e4dc2d85bf67" (UID: "05fc8149-5d54-4490-8d58-e4dc2d85bf67"). InnerVolumeSpecName "kube-api-access-2srfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.210682 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05fc8149-5d54-4490-8d58-e4dc2d85bf67" (UID: "05fc8149-5d54-4490-8d58-e4dc2d85bf67"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.262137 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2srfm\" (UniqueName: \"kubernetes.io/projected/05fc8149-5d54-4490-8d58-e4dc2d85bf67-kube-api-access-2srfm\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.262172 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05fc8149-5d54-4490-8d58-e4dc2d85bf67-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.405260 5081 generic.go:334] "Generic (PLEG): container finished" podID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerID="398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98" exitCode=0 Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.405333 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerDied","Data":"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98"} Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.405352 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8lqdw" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.405378 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8lqdw" event={"ID":"05fc8149-5d54-4490-8d58-e4dc2d85bf67","Type":"ContainerDied","Data":"d3647b0f6eab82d1b99337a8b05f9a17483dc3821e3cb420c689fba2640ea104"} Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.405406 5081 scope.go:117] "RemoveContainer" containerID="398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.433655 5081 scope.go:117] "RemoveContainer" containerID="70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.443448 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.449666 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8lqdw"] Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.478895 5081 scope.go:117] "RemoveContainer" containerID="907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.493987 5081 scope.go:117] "RemoveContainer" containerID="398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98" Oct 03 16:45:18 crc kubenswrapper[5081]: E1003 16:45:18.494527 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98\": container with ID starting with 398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98 not found: ID does not exist" containerID="398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.494591 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98"} err="failed to get container status 
\"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98\": rpc error: code = NotFound desc = could not find container \"398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98\": container with ID starting with 398f2e50a306ac42bf85fe050eb19063304218deb1b81cf30ee2d58ded0f0f98 not found: ID does not exist" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.494619 5081 scope.go:117] "RemoveContainer" containerID="70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b" Oct 03 16:45:18 crc kubenswrapper[5081]: E1003 16:45:18.495215 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b\": container with ID starting with 70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b not found: ID does not exist" containerID="70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.495249 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b"} err="failed to get container status \"70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b\": rpc error: code = NotFound desc = could not find container \"70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b\": container with ID starting with 70c409123d7f00aa366dc81926c27a2389af6d0414411847091b3ddef89e722b not found: ID does not exist" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.495270 5081 scope.go:117] "RemoveContainer" containerID="907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261" Oct 03 16:45:18 crc kubenswrapper[5081]: E1003 16:45:18.495710 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261\": container with ID starting with 907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261 not found: ID does not exist" containerID="907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261" Oct 03 16:45:18 crc kubenswrapper[5081]: I1003 16:45:18.495759 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261"} err="failed to get container status \"907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261\": rpc error: code = NotFound desc = could not find container \"907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261\": container with ID starting with 907831b69c7d1d1d25a7a00db219bc01abe9518e65e58de632b696e84843e261 not found: ID does not exist" Oct 03 16:45:19 crc kubenswrapper[5081]: I1003 16:45:19.837962 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" path="/var/lib/kubelet/pods/05fc8149-5d54-4490-8d58-e4dc2d85bf67/volumes" Oct 03 16:45:22 crc kubenswrapper[5081]: I1003 16:45:22.098832 5081 scope.go:117] "RemoveContainer" containerID="64a300e770e0aed506cbe308b2d72bbff77ce3372ec74048e0feff3a43e9e905" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.953461 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:39 crc kubenswrapper[5081]: E1003 16:45:39.954376 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="extract-utilities" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.954391 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="extract-utilities" Oct 03 16:45:39 crc kubenswrapper[5081]: E1003 16:45:39.954415 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="extract-content" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.954421 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="extract-content" Oct 03 16:45:39 crc kubenswrapper[5081]: E1003 16:45:39.954430 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="registry-server" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.954435 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="registry-server" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.954630 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="05fc8149-5d54-4490-8d58-e4dc2d85bf67" containerName="registry-server" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.956874 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.961279 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.969188 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.969317 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:39 crc kubenswrapper[5081]: I1003 16:45:39.969340 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46ph8\" (UniqueName: \"kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.070516 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.070583 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46ph8\" (UniqueName: \"kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8\") pod 
\"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.070666 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.071049 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.071183 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.093414 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46ph8\" (UniqueName: \"kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8\") pod \"community-operators-9ts2f\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.279822 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:40 crc kubenswrapper[5081]: I1003 16:45:40.791906 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:41 crc kubenswrapper[5081]: I1003 16:45:41.583214 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8821036-3506-442e-ae63-68f535b9b3dd" containerID="33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55" exitCode=0 Oct 03 16:45:41 crc kubenswrapper[5081]: I1003 16:45:41.583316 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerDied","Data":"33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55"} Oct 03 16:45:41 crc kubenswrapper[5081]: I1003 16:45:41.583749 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerStarted","Data":"40a9361210e11a46c4f5ecf9c561bdfdaddf6ce5a990e1cb107186d4e7bdc0ef"} Oct 03 16:45:42 crc kubenswrapper[5081]: I1003 16:45:42.592603 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerStarted","Data":"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5"} Oct 03 16:45:43 crc kubenswrapper[5081]: I1003 16:45:43.603283 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8821036-3506-442e-ae63-68f535b9b3dd" containerID="df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5" exitCode=0 Oct 03 16:45:43 crc kubenswrapper[5081]: I1003 16:45:43.603391 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerDied","Data":"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5"} Oct 03 16:45:44 crc kubenswrapper[5081]: I1003 16:45:44.613026 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerStarted","Data":"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b"} Oct 03 16:45:44 crc kubenswrapper[5081]: I1003 16:45:44.632834 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9ts2f" podStartSLOduration=3.213672595 podStartE2EDuration="5.632819271s" podCreationTimestamp="2025-10-03 16:45:39 +0000 UTC" firstStartedPulling="2025-10-03 16:45:41.585241851 +0000 UTC m=+4660.550798464" lastFinishedPulling="2025-10-03 16:45:44.004388527 +0000 UTC m=+4662.969945140" observedRunningTime="2025-10-03 16:45:44.629321821 +0000 UTC m=+4663.594878434" watchObservedRunningTime="2025-10-03 16:45:44.632819271 +0000 UTC m=+4663.598375884" Oct 03 16:45:50 crc kubenswrapper[5081]: I1003 16:45:50.280727 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:50 crc kubenswrapper[5081]: I1003 16:45:50.281370 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:50 crc kubenswrapper[5081]: I1003 16:45:50.333883 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:50 crc kubenswrapper[5081]: I1003 16:45:50.696510 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:50 crc kubenswrapper[5081]: I1003 16:45:50.738572 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:52 crc kubenswrapper[5081]: I1003 16:45:52.669048 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9ts2f" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="registry-server" containerID="cri-o://258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b" gracePeriod=2 Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.034908 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.153249 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities\") pod \"b8821036-3506-442e-ae63-68f535b9b3dd\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.153361 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46ph8\" (UniqueName: \"kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8\") pod \"b8821036-3506-442e-ae63-68f535b9b3dd\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.153484 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content\") pod \"b8821036-3506-442e-ae63-68f535b9b3dd\" (UID: \"b8821036-3506-442e-ae63-68f535b9b3dd\") " Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.154264 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities" (OuterVolumeSpecName: "utilities") pod "b8821036-3506-442e-ae63-68f535b9b3dd" (UID: "b8821036-3506-442e-ae63-68f535b9b3dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.166805 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8" (OuterVolumeSpecName: "kube-api-access-46ph8") pod "b8821036-3506-442e-ae63-68f535b9b3dd" (UID: "b8821036-3506-442e-ae63-68f535b9b3dd"). InnerVolumeSpecName "kube-api-access-46ph8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.255595 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.255632 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46ph8\" (UniqueName: \"kubernetes.io/projected/b8821036-3506-442e-ae63-68f535b9b3dd-kube-api-access-46ph8\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.660769 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8821036-3506-442e-ae63-68f535b9b3dd" (UID: "b8821036-3506-442e-ae63-68f535b9b3dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.676942 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8821036-3506-442e-ae63-68f535b9b3dd" containerID="258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b" exitCode=0 Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.676989 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerDied","Data":"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b"} Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.676998 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ts2f" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.677023 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ts2f" event={"ID":"b8821036-3506-442e-ae63-68f535b9b3dd","Type":"ContainerDied","Data":"40a9361210e11a46c4f5ecf9c561bdfdaddf6ce5a990e1cb107186d4e7bdc0ef"} Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.677043 5081 scope.go:117] "RemoveContainer" containerID="258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.693863 5081 scope.go:117] "RemoveContainer" containerID="df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.716993 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.721096 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9ts2f"] Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.735492 5081 scope.go:117] "RemoveContainer" containerID="33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.749547 5081 scope.go:117] "RemoveContainer" containerID="258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b" Oct 03 16:45:53 crc kubenswrapper[5081]: E1003 16:45:53.749988 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b\": container with ID starting with 
258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b not found: ID does not exist" containerID="258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.750116 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b"} err="failed to get container status \"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b\": rpc error: code = NotFound desc = could not find container \"258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b\": container with ID starting with 258d73a990cff440a27491826dad8a5f0682bbbc2de3523d728c2b4a4856b69b not found: ID does not exist" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.750225 5081 scope.go:117] "RemoveContainer" containerID="df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5" Oct 03 16:45:53 crc kubenswrapper[5081]: E1003 16:45:53.750603 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5\": container with ID starting with df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5 not found: ID does not exist" containerID="df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.750643 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5"} err="failed to get container status \"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5\": rpc error: code = NotFound desc = could not find container \"df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5\": container with ID starting with df6051dcdd0ae8ea291f15006c431799048a5a7d9c7afaf54fd64638d03411d5 not found: ID does not exist" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.750666 5081 scope.go:117] "RemoveContainer" containerID="33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55" Oct 03 16:45:53 crc kubenswrapper[5081]: E1003 16:45:53.750889 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55\": container with ID starting with 33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55 not found: ID does not exist" containerID="33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.750985 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55"} err="failed to get container status \"33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55\": rpc error: code = NotFound desc = could not find container \"33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55\": container with ID starting with 33442060bf3a5de86c8c7eff41056789092d6c8461d253f91ab0cf78f30a6e55 not found: ID does not exist" Oct 03 16:45:53 crc kubenswrapper[5081]: I1003 16:45:53.761847 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8821036-3506-442e-ae63-68f535b9b3dd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:45:53 crc kubenswrapper[5081]: 
I1003 16:45:53.835909 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" path="/var/lib/kubelet/pods/b8821036-3506-442e-ae63-68f535b9b3dd/volumes" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.115141 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"] Oct 03 16:47:24 crc kubenswrapper[5081]: E1003 16:47:24.116142 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="registry-server" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.116158 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="registry-server" Oct 03 16:47:24 crc kubenswrapper[5081]: E1003 16:47:24.116175 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="extract-utilities" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.116183 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="extract-utilities" Oct 03 16:47:24 crc kubenswrapper[5081]: E1003 16:47:24.116202 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="extract-content" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.116210 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="extract-content" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.116375 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8821036-3506-442e-ae63-68f535b9b3dd" containerName="registry-server" Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.117227 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.119678 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.119968 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.120109 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-2m54g"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.121197 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.121220 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.154118 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"]
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.225460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp9sf\" (UniqueName: \"kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.225523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.225653 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.329387 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp9sf\" (UniqueName: \"kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.329452 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.329577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.330749 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.331300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.375125 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp9sf\" (UniqueName: \"kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf\") pod \"dnsmasq-dns-8459d5dff9-bvghr\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.444844 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.539352 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.541875 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.558582 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.641744 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.642175 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9wd\" (UniqueName: \"kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.642278 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.743718 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9wd\" (UniqueName: \"kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.743837 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.743894 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.744788 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.745552 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.763473 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9wd\" (UniqueName: \"kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd\") pod \"dnsmasq-dns-5b77d44889-swrg8\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") " pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:24 crc kubenswrapper[5081]: I1003 16:47:24.891038 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.017626 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.287293 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.288603 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.290660 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.290843 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-844dm"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.290881 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.290987 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.291062 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.299818 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.321094 5081 generic.go:334] "Generic (PLEG): container finished" podID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerID="73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73" exitCode=0
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.321152 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" event={"ID":"63ed3c1f-9b69-4f96-8ca8-456b43827adc","Type":"ContainerDied","Data":"73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73"}
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.321356 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" event={"ID":"63ed3c1f-9b69-4f96-8ca8-456b43827adc","Type":"ContainerStarted","Data":"c8b3ddaa6386c4b5d4c0548ea16c2e28e8690c967ec45c0d60e51c83e543c0e9"}
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.325703 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463520 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463647 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p48p\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463678 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463752 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463783 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463817 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463842 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463903 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.463963 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: E1003 16:47:25.527779 5081 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Oct 03 16:47:25 crc kubenswrapper[5081]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/63ed3c1f-9b69-4f96-8ca8-456b43827adc/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 03 16:47:25 crc kubenswrapper[5081]: > podSandboxID="c8b3ddaa6386c4b5d4c0548ea16c2e28e8690c967ec45c0d60e51c83e543c0e9"
Oct 03 16:47:25 crc kubenswrapper[5081]: E1003 16:47:25.527964 5081 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Oct 03 16:47:25 crc kubenswrapper[5081]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:deed73df7ec3db8068a2ded61c540a3fa530863d2c77498014508b022c542db5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8chc6h5bh56fh546hb7hc8h67h5bchffh577h697h5b5h5bdh59bhf6hf4h558hb5h578h595h5cchfbh644h59ch7fh654h547h587h5cbh5d5h8fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gp9sf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8459d5dff9-bvghr_openstack(63ed3c1f-9b69-4f96-8ca8-456b43827adc): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/63ed3c1f-9b69-4f96-8ca8-456b43827adc/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 03 16:47:25 crc kubenswrapper[5081]: > logger="UnhandledError"
Oct 03 16:47:25 crc kubenswrapper[5081]: E1003 16:47:25.529036 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/63ed3c1f-9b69-4f96-8ca8-456b43827adc/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.564976 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565331 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565425 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565581 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p48p\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565687 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.566265 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.567070 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.567236 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.565873 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.566938 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.568361 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.569867 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.573587 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.573777 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.575392 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.576722 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.576788 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b1beec300a485d9bc8c97144678f0cf0e39d898f5c098f9c99d44d3284c8beb5/globalmount\"" pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.588225 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p48p\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.620240 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.701000 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.702273 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.704812 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.704857 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.704823 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gdlt5"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.705107 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.705162 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.719059 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.870774 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.870821 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.870842 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.870871 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzgj8\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.870988 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.871053 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.871099 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.871121 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.871163 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.919678 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972250 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972330 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972365 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972449 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972469 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972492 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.972526 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzgj8\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.973625 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.973724 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.973963 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.974051 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.974358 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.974920 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.974951 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0cfbab479e9d0ebba3db0af50f6e969898bebde1ddd47ba8c253b86e70fc42b1/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.976018 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.976381 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.976405 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:25 crc kubenswrapper[5081]: I1003 16:47:25.990733 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzgj8\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.012708 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.037872 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.311871 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.330233 5081 generic.go:334] "Generic (PLEG): container finished" podID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerID="9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748" exitCode=0
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.330305 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" event={"ID":"4f3545ab-0602-43f1-9777-d7a7b58f1c2c","Type":"ContainerDied","Data":"9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748"}
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.330406 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" event={"ID":"4f3545ab-0602-43f1-9777-d7a7b58f1c2c","Type":"ContainerStarted","Data":"4912753ad589330d7db9a71c8804af9479c4efde0df45e624c6d0f399f0cb006"}
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.332669 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerStarted","Data":"d693f04b54a424ff4ac9ec20ee732354bfc7f8fb1c147640cfb22544e21a7605"}
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.363059 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.980386 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.981892 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.983949 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-knhtr"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.985110 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Oct 03 16:47:26 crc kubenswrapper[5081]: I1003 16:47:26.990501 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.090742 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-kolla-config\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.090794 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-config-data\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.090854 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnrpq\" (UniqueName: \"kubernetes.io/projected/85e6ff59-4218-4af3-8a06-ab7babff11f1-kube-api-access-gnrpq\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.191807 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnrpq\" (UniqueName: \"kubernetes.io/projected/85e6ff59-4218-4af3-8a06-ab7babff11f1-kube-api-access-gnrpq\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.191918 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-kolla-config\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.191952 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-config-data\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.192882 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-config-data\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.193187 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e6ff59-4218-4af3-8a06-ab7babff11f1-kolla-config\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.210462 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnrpq\" (UniqueName: \"kubernetes.io/projected/85e6ff59-4218-4af3-8a06-ab7babff11f1-kube-api-access-gnrpq\") pod \"memcached-0\" (UID: \"85e6ff59-4218-4af3-8a06-ab7babff11f1\") " pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.298016 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.341115 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" event={"ID":"4f3545ab-0602-43f1-9777-d7a7b58f1c2c","Type":"ContainerStarted","Data":"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"}
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.341264 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.343173 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerStarted","Data":"021e777fd6d67e1408cb2f297f86da7a8fa63a822095e1fab06d59057d869894"}
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.345314 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" event={"ID":"63ed3c1f-9b69-4f96-8ca8-456b43827adc","Type":"ContainerStarted","Data":"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e"}
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.345539 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.364358 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" podStartSLOduration=3.364342806 podStartE2EDuration="3.364342806s" podCreationTimestamp="2025-10-03 16:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:47:27.362056811 +0000 UTC m=+4766.327613444" watchObservedRunningTime="2025-10-03 16:47:27.364342806 +0000 UTC m=+4766.329899419"
Oct 03 16:47:27 crc kubenswrapper[5081]: I1003 16:47:27.415359 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" podStartSLOduration=3.415335512 podStartE2EDuration="3.415335512s" podCreationTimestamp="2025-10-03 16:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:47:27.41140572 +0000 UTC m=+4766.376962353" watchObservedRunningTime="2025-10-03 16:47:27.415335512 +0000 UTC m=+4766.380892125"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:27.910473 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.325122 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.328997 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.330788 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.332835 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-5942s"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.332893 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.333296 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.335456 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.379278 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.383257 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.385979 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerStarted","Data":"37800ec81a808ad1e2341a5cb7784598c92f69fd5c9b9f2e3eb26b0ff24efc06"}
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.388927 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.390261 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.391867 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"85e6ff59-4218-4af3-8a06-ab7babff11f1","Type":"ContainerStarted","Data":"641fbeeb14fa4496b46ba795fef4efeb9004dda2b7a349a0bb8b284ccd9be847"}
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.391913 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"85e6ff59-4218-4af3-8a06-ab7babff11f1","Type":"ContainerStarted","Data":"74fa788de5a49eaa501add26e06d7b931b1705e039b1799b763b1352072be93d"}
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.392762 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.394286 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bvlsd"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.394509 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.394752 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.395049 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.397282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerStarted","Data":"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe"}
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.425989 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.426865 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.426940 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-secrets\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.426981 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427240 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427310 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427359 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427451 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbszz\" (UniqueName: \"kubernetes.io/projected/f2d92c51-1c3d-401a-b405-973b0ec094b7-kube-api-access-sbszz\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.427667 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.464649 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.464624213 podStartE2EDuration="2.464624213s" podCreationTimestamp="2025-10-03 16:47:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:47:28.457800288 +0000 UTC m=+4767.423356921" watchObservedRunningTime="2025-10-03 16:47:28.464624213 +0000 UTC m=+4767.430180826"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.528949 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529016 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbszz\" (UniqueName: \"kubernetes.io/projected/f2d92c51-1c3d-401a-b405-973b0ec094b7-kube-api-access-sbszz\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529094 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529115 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529137 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529154 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tflqj\" (UniqueName: \"kubernetes.io/projected/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kube-api-access-tflqj\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529248 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529301 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529361 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529406 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.529606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-secrets\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.531485 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-kolla-config\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.531545 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-default\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.531893 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532035 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532098 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532173 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532206 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532231 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.532913 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f2d92c51-1c3d-401a-b405-973b0ec094b7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.533188 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2d92c51-1c3d-401a-b405-973b0ec094b7-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.536574 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-secrets\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.536665 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.536848 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d92c51-1c3d-401a-b405-973b0ec094b7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.537865 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.537894 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/73609bffea3f68aa82014f3c5cac2db50e3e62594122aa00302e97f649999211/globalmount\"" pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.553339 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbszz\" (UniqueName: \"kubernetes.io/projected/f2d92c51-1c3d-401a-b405-973b0ec094b7-kube-api-access-sbszz\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.566639 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40a59c6e-72ef-4908-b7ac-d49ebad57ba6\") pod \"openstack-galera-0\" (UID: \"f2d92c51-1c3d-401a-b405-973b0ec094b7\") " pod="openstack/openstack-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634077 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tflqj\" (UniqueName: \"kubernetes.io/projected/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kube-api-access-tflqj\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0"
Oct 03 16:47:28 crc kubenswrapper[5081]: I1003
16:47:28.634150 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634219 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634282 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634313 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634395 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634425 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.634475 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.635066 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.635672 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.636023 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.636122 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.637972 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.639345 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.639714 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.644499 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.644535 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c8c13b89227c53cff85c37ef142ae5649007ad23ccf708f396162692b1eee0f3/globalmount\"" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.652047 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tflqj\" (UniqueName: \"kubernetes.io/projected/afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc-kube-api-access-tflqj\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.669472 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.672432 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b34fbad-dae8-4f83-bb8d-704ea740d6d4\") pod \"openstack-cell1-galera-0\" (UID: \"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc\") " pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:28 crc kubenswrapper[5081]: I1003 16:47:28.706958 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:29 crc kubenswrapper[5081]: W1003 16:47:29.116258 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2d92c51_1c3d_401a_b405_973b0ec094b7.slice/crio-c5ca08d56bae976b3b0c449724fbfb9083f2fd818966b00210b380535819553d WatchSource:0}: Error finding container c5ca08d56bae976b3b0c449724fbfb9083f2fd818966b00210b380535819553d: Status 404 returned error can't find the container with id c5ca08d56bae976b3b0c449724fbfb9083f2fd818966b00210b380535819553d Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.119153 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.187040 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 16:47:29 crc kubenswrapper[5081]: W1003 16:47:29.192552 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafc0e0e6_9d42_4e1e_9dd5_79d2634f26bc.slice/crio-5f0a8fc945c5f470376dfe8fd98e15d1a4c6e7c0484342ebb9af78a52eb39d84 WatchSource:0}: Error finding container 5f0a8fc945c5f470376dfe8fd98e15d1a4c6e7c0484342ebb9af78a52eb39d84: Status 404 returned error can't find the container with id 5f0a8fc945c5f470376dfe8fd98e15d1a4c6e7c0484342ebb9af78a52eb39d84 Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.403922 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2d92c51-1c3d-401a-b405-973b0ec094b7","Type":"ContainerStarted","Data":"2b91ba61534ceef603b5e78cd8cc84ca86b51672577b5502cb817f24443dabc1"} Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.404242 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2d92c51-1c3d-401a-b405-973b0ec094b7","Type":"ContainerStarted","Data":"c5ca08d56bae976b3b0c449724fbfb9083f2fd818966b00210b380535819553d"} Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.405517 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc","Type":"ContainerStarted","Data":"d141e49df0db148a00adc5febd1f202bcd83d5fa00a63ea1a9a61386acdb99fc"} Oct 03 16:47:29 crc kubenswrapper[5081]: I1003 16:47:29.405593 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc","Type":"ContainerStarted","Data":"5f0a8fc945c5f470376dfe8fd98e15d1a4c6e7c0484342ebb9af78a52eb39d84"} Oct 03 16:47:30 crc kubenswrapper[5081]: I1003 16:47:30.647061 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:47:30 crc kubenswrapper[5081]: I1003 16:47:30.647387 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:47:33 crc kubenswrapper[5081]: I1003 16:47:33.435201 5081 generic.go:334] "Generic (PLEG): container finished" podID="f2d92c51-1c3d-401a-b405-973b0ec094b7" containerID="2b91ba61534ceef603b5e78cd8cc84ca86b51672577b5502cb817f24443dabc1" exitCode=0 Oct 03 16:47:33 crc kubenswrapper[5081]: I1003 16:47:33.435297 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2d92c51-1c3d-401a-b405-973b0ec094b7","Type":"ContainerDied","Data":"2b91ba61534ceef603b5e78cd8cc84ca86b51672577b5502cb817f24443dabc1"} Oct 03 16:47:33 crc kubenswrapper[5081]: I1003 16:47:33.438693 5081 generic.go:334] "Generic (PLEG): container finished" podID="afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc" containerID="d141e49df0db148a00adc5febd1f202bcd83d5fa00a63ea1a9a61386acdb99fc" exitCode=0 Oct 03 16:47:33 crc kubenswrapper[5081]: I1003 16:47:33.438728 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc","Type":"ContainerDied","Data":"d141e49df0db148a00adc5febd1f202bcd83d5fa00a63ea1a9a61386acdb99fc"} Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.446332 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.447319 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f2d92c51-1c3d-401a-b405-973b0ec094b7","Type":"ContainerStarted","Data":"42730826e182dd40afa2d0d433a023f007807eea3bf50c38c476862bc00b3422"} Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.449424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc","Type":"ContainerStarted","Data":"2725f54f5732ff466a2e1e3b1350c1be4261a8a9af0f338d484d01418b88d95b"} Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.496642 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.496623261 podStartE2EDuration="7.496623261s" podCreationTimestamp="2025-10-03 16:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:47:34.489771775 +0000 UTC m=+4773.455328398" watchObservedRunningTime="2025-10-03 16:47:34.496623261 +0000 UTC m=+4773.462179874" Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.530898 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.530858309 podStartE2EDuration="7.530858309s" podCreationTimestamp="2025-10-03 16:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:47:34.515877041 +0000 UTC m=+4773.481433664" watchObservedRunningTime="2025-10-03 16:47:34.530858309 +0000 UTC m=+4773.496414922" Oct 03 
16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.892810 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" Oct 03 16:47:34 crc kubenswrapper[5081]: I1003 16:47:34.944675 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"] Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.462508 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="dnsmasq-dns" containerID="cri-o://7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e" gracePeriod=10 Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.852471 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.942755 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config\") pod \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.942898 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc\") pod \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.942932 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp9sf\" (UniqueName: \"kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf\") pod \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\" (UID: \"63ed3c1f-9b69-4f96-8ca8-456b43827adc\") " Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.948292 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf" (OuterVolumeSpecName: "kube-api-access-gp9sf") pod "63ed3c1f-9b69-4f96-8ca8-456b43827adc" (UID: "63ed3c1f-9b69-4f96-8ca8-456b43827adc"). InnerVolumeSpecName "kube-api-access-gp9sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.979010 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config" (OuterVolumeSpecName: "config") pod "63ed3c1f-9b69-4f96-8ca8-456b43827adc" (UID: "63ed3c1f-9b69-4f96-8ca8-456b43827adc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:47:35 crc kubenswrapper[5081]: I1003 16:47:35.979055 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "63ed3c1f-9b69-4f96-8ca8-456b43827adc" (UID: "63ed3c1f-9b69-4f96-8ca8-456b43827adc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.045208 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.045243 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp9sf\" (UniqueName: \"kubernetes.io/projected/63ed3c1f-9b69-4f96-8ca8-456b43827adc-kube-api-access-gp9sf\") on node \"crc\" DevicePath \"\"" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.045254 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ed3c1f-9b69-4f96-8ca8-456b43827adc-config\") on node \"crc\" DevicePath \"\"" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.465124 5081 generic.go:334] "Generic (PLEG): container finished" podID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerID="7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e" exitCode=0 Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.465173 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.465174 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" event={"ID":"63ed3c1f-9b69-4f96-8ca8-456b43827adc","Type":"ContainerDied","Data":"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e"} Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.465270 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8459d5dff9-bvghr" event={"ID":"63ed3c1f-9b69-4f96-8ca8-456b43827adc","Type":"ContainerDied","Data":"c8b3ddaa6386c4b5d4c0548ea16c2e28e8690c967ec45c0d60e51c83e543c0e9"} Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.465288 5081 scope.go:117] "RemoveContainer" containerID="7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.489070 5081 scope.go:117] "RemoveContainer" containerID="73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.499056 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"] Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.505918 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8459d5dff9-bvghr"] Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.523272 5081 scope.go:117] "RemoveContainer" containerID="7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e" Oct 03 16:47:36 crc kubenswrapper[5081]: E1003 16:47:36.523681 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e\": container with ID starting with 7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e not found: ID does not exist" containerID="7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.523722 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e"} err="failed to get container status 
\"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e\": rpc error: code = NotFound desc = could not find container \"7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e\": container with ID starting with 7d490e736dbb0a08dc7c5c8a9649d21cf97690a67cad8c04bce8e2e20b8e934e not found: ID does not exist" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.523747 5081 scope.go:117] "RemoveContainer" containerID="73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73" Oct 03 16:47:36 crc kubenswrapper[5081]: E1003 16:47:36.524000 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73\": container with ID starting with 73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73 not found: ID does not exist" containerID="73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73" Oct 03 16:47:36 crc kubenswrapper[5081]: I1003 16:47:36.524053 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73"} err="failed to get container status \"73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73\": rpc error: code = NotFound desc = could not find container \"73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73\": container with ID starting with 73c105ca824aa306c8da912417651f2e788dc4ed4415bb53eef36ec751975d73 not found: ID does not exist" Oct 03 16:47:37 crc kubenswrapper[5081]: I1003 16:47:37.299368 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 03 16:47:37 crc kubenswrapper[5081]: E1003 16:47:37.716052 5081 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.59:50286->38.102.83.59:44537: write tcp 38.102.83.59:50286->38.102.83.59:44537: write: broken pipe Oct 03 16:47:37 crc kubenswrapper[5081]: I1003 16:47:37.839714 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" path="/var/lib/kubelet/pods/63ed3c1f-9b69-4f96-8ca8-456b43827adc/volumes" Oct 03 16:47:38 crc kubenswrapper[5081]: I1003 16:47:38.670691 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 03 16:47:38 crc kubenswrapper[5081]: I1003 16:47:38.670784 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 03 16:47:38 crc kubenswrapper[5081]: I1003 16:47:38.708018 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:38 crc kubenswrapper[5081]: I1003 16:47:38.708414 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:38 crc kubenswrapper[5081]: I1003 16:47:38.715590 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 03 16:47:39 crc kubenswrapper[5081]: I1003 16:47:39.552240 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 03 16:47:40 crc kubenswrapper[5081]: I1003 16:47:40.759857 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:40 crc kubenswrapper[5081]: I1003 16:47:40.798791 5081 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 03 16:47:59 crc kubenswrapper[5081]: I1003 16:47:59.635849 5081 generic.go:334] "Generic (PLEG): container finished" podID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerID="37800ec81a808ad1e2341a5cb7784598c92f69fd5c9b9f2e3eb26b0ff24efc06" exitCode=0 Oct 03 16:47:59 crc kubenswrapper[5081]: I1003 16:47:59.635935 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerDied","Data":"37800ec81a808ad1e2341a5cb7784598c92f69fd5c9b9f2e3eb26b0ff24efc06"} Oct 03 16:47:59 crc kubenswrapper[5081]: I1003 16:47:59.637797 5081 generic.go:334] "Generic (PLEG): container finished" podID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerID="4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe" exitCode=0 Oct 03 16:47:59 crc kubenswrapper[5081]: I1003 16:47:59.637829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerDied","Data":"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe"} Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.645968 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerStarted","Data":"3bf3bd31f6d854bc8390ecdbb342f179be3855cb530918a50833de8f5bcb4924"} Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.646495 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.647339 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.647401 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.649391 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerStarted","Data":"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7"} Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.649566 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.675167 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.675150662 podStartE2EDuration="36.675150662s" podCreationTimestamp="2025-10-03 16:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:48:00.670305314 +0000 UTC m=+4799.635861927" watchObservedRunningTime="2025-10-03 16:48:00.675150662 +0000 UTC m=+4799.640707275" Oct 03 16:48:00 crc kubenswrapper[5081]: I1003 16:48:00.696300 5081 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.696281596 podStartE2EDuration="36.696281596s" podCreationTimestamp="2025-10-03 16:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:48:00.689848212 +0000 UTC m=+4799.655404835" watchObservedRunningTime="2025-10-03 16:48:00.696281596 +0000 UTC m=+4799.661838209" Oct 03 16:48:15 crc kubenswrapper[5081]: I1003 16:48:15.922419 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 16:48:16 crc kubenswrapper[5081]: I1003 16:48:16.040677 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.405397 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"] Oct 03 16:48:21 crc kubenswrapper[5081]: E1003 16:48:21.406017 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="init" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.406029 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="init" Oct 03 16:48:21 crc kubenswrapper[5081]: E1003 16:48:21.406253 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="dnsmasq-dns" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.406259 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="dnsmasq-dns" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.406404 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="63ed3c1f-9b69-4f96-8ca8-456b43827adc" containerName="dnsmasq-dns" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.407156 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.421874 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"] Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.531731 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.531959 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.532135 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dckz5\" (UniqueName: \"kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.633838 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dckz5\" (UniqueName: \"kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.633941 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.634001 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.634997 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.635111 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.652724 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dckz5\" (UniqueName: 
\"kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5\") pod \"dnsmasq-dns-669d466995-flj8w\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") " pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:21 crc kubenswrapper[5081]: I1003 16:48:21.731324 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.112438 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.172990 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"] Oct 03 16:48:22 crc kubenswrapper[5081]: W1003 16:48:22.177280 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbba4d832_4690_4a31_a7c3_9b2139cbeb2f.slice/crio-4117842db802491b321f907623f6f4a7b9a574a2583869e3275d45a0654afa7a WatchSource:0}: Error finding container 4117842db802491b321f907623f6f4a7b9a574a2583869e3275d45a0654afa7a: Status 404 returned error can't find the container with id 4117842db802491b321f907623f6f4a7b9a574a2583869e3275d45a0654afa7a Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.782441 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.804751 5081 generic.go:334] "Generic (PLEG): container finished" podID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerID="362159724da2618ab4905b738878bacb754ce61aab453b7c01ba39e37548d10b" exitCode=0 Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.804823 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d466995-flj8w" event={"ID":"bba4d832-4690-4a31-a7c3-9b2139cbeb2f","Type":"ContainerDied","Data":"362159724da2618ab4905b738878bacb754ce61aab453b7c01ba39e37548d10b"} Oct 03 16:48:22 crc kubenswrapper[5081]: I1003 16:48:22.804855 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d466995-flj8w" event={"ID":"bba4d832-4690-4a31-a7c3-9b2139cbeb2f","Type":"ContainerStarted","Data":"4117842db802491b321f907623f6f4a7b9a574a2583869e3275d45a0654afa7a"} Oct 03 16:48:23 crc kubenswrapper[5081]: I1003 16:48:23.814059 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d466995-flj8w" event={"ID":"bba4d832-4690-4a31-a7c3-9b2139cbeb2f","Type":"ContainerStarted","Data":"2c57c7c17665904a0af5bc48e5aa7f368341c5baf8297af7b8d67d4f96c2b18e"} Oct 03 16:48:23 crc kubenswrapper[5081]: I1003 16:48:23.814456 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-669d466995-flj8w" Oct 03 16:48:23 crc kubenswrapper[5081]: I1003 16:48:23.830043 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-669d466995-flj8w" podStartSLOduration=2.830025267 podStartE2EDuration="2.830025267s" podCreationTimestamp="2025-10-03 16:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:48:23.828106552 +0000 UTC m=+4822.793663185" watchObservedRunningTime="2025-10-03 16:48:23.830025267 +0000 UTC m=+4822.795581880" Oct 03 16:48:24 crc kubenswrapper[5081]: I1003 16:48:24.147944 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" 
podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="rabbitmq" containerID="cri-o://1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7" gracePeriod=604798 Oct 03 16:48:24 crc kubenswrapper[5081]: I1003 16:48:24.637771 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="rabbitmq" containerID="cri-o://3bf3bd31f6d854bc8390ecdbb342f179be3855cb530918a50833de8f5bcb4924" gracePeriod=604799 Oct 03 16:48:25 crc kubenswrapper[5081]: I1003 16:48:25.921254 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5672: connect: connection refused" Oct 03 16:48:26 crc kubenswrapper[5081]: I1003 16:48:26.039244 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.241:5672: connect: connection refused" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.647169 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.647239 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.647284 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.648013 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.648066 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7" gracePeriod=600 Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.712340 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873088 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873495 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873572 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873619 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p48p\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873770 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873802 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873831 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873867 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.873891 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info\") pod \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\" (UID: \"dabe8a70-65de-444d-b6f0-776ba7a0e7e9\") " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.874977 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: 
"rabbitmq-erlang-cookie") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.875009 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.875424 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.880403 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info" (OuterVolumeSpecName: "pod-info") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.882305 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.882836 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p" (OuterVolumeSpecName: "kube-api-access-2p48p") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "kube-api-access-2p48p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.883636 5081 generic.go:334] "Generic (PLEG): container finished" podID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerID="3bf3bd31f6d854bc8390ecdbb342f179be3855cb530918a50833de8f5bcb4924" exitCode=0 Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.883764 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerDied","Data":"3bf3bd31f6d854bc8390ecdbb342f179be3855cb530918a50833de8f5bcb4924"} Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888243 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9" (OuterVolumeSpecName: "persistence") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888679 5081 generic.go:334] "Generic (PLEG): container finished" podID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerID="1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7" exitCode=0 Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888737 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerDied","Data":"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7"} Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888768 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dabe8a70-65de-444d-b6f0-776ba7a0e7e9","Type":"ContainerDied","Data":"021e777fd6d67e1408cb2f297f86da7a8fa63a822095e1fab06d59057d869894"} Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888785 5081 scope.go:117] "RemoveContainer" containerID="1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.888915 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.895741 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7" exitCode=0 Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.895913 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7"} Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.902150 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf" (OuterVolumeSpecName: "server-conf") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.937446 5081 scope.go:117] "RemoveContainer" containerID="4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.953078 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "dabe8a70-65de-444d-b6f0-776ba7a0e7e9" (UID: "dabe8a70-65de-444d-b6f0-776ba7a0e7e9"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.956817 5081 scope.go:117] "RemoveContainer" containerID="1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7" Oct 03 16:48:30 crc kubenswrapper[5081]: E1003 16:48:30.957244 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7\": container with ID starting with 1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7 not found: ID does not exist" containerID="1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.957276 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7"} err="failed to get container status \"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7\": rpc error: code = NotFound desc = could not find container \"1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7\": container with ID starting with 1b0c2f9521a716bc0ef653ca4b10baebdd311a4b13f06bb282c547f63c1011f7 not found: ID does not exist" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.957304 5081 scope.go:117] "RemoveContainer" containerID="4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe" Oct 03 16:48:30 crc kubenswrapper[5081]: E1003 16:48:30.957548 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe\": container with ID starting with 4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe not found: ID does not exist" containerID="4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.957619 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe"} err="failed to get container status \"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe\": rpc error: code = NotFound desc = could not find container \"4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe\": container with ID starting with 4c76b043df6e84593ca9d88e82ef6fe4932ad7bfbf1f851400f2bf3d86be7afe not found: ID does not exist" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.957637 5081 scope.go:117] "RemoveContainer" containerID="f3e2c1c8083a0353b670c51720c79973bd27686949cc3c37288c46a0c7b3a3f7" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976054 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976088 5081 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976102 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p48p\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-kube-api-access-2p48p\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc 
kubenswrapper[5081]: I1003 16:48:30.976147 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") on node \"crc\" " Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976164 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976177 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976189 5081 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976198 5081 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.976207 5081 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dabe8a70-65de-444d-b6f0-776ba7a0e7e9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.994531 5081 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 03 16:48:30 crc kubenswrapper[5081]: I1003 16:48:30.994783 5081 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9") on node "crc" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.077969 5081 reconciler_common.go:293] "Volume detached for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.137262 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.222661 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.230005 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.253768 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 16:48:31 crc kubenswrapper[5081]: E1003 16:48:31.254140 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254161 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: E1003 16:48:31.254176 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="setup-container" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254183 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="setup-container" Oct 03 16:48:31 crc kubenswrapper[5081]: E1003 16:48:31.254210 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254218 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: E1003 16:48:31.254233 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="setup-container" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254241 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="setup-container" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254415 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.254444 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" containerName="rabbitmq" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.255321 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.262016 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.262056 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.262199 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.262369 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.262552 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-844dm" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.266297 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281152 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281271 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzgj8\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281308 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281520 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281592 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281615 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281688 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: 
\"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281706 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.281740 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info\") pod \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\" (UID: \"d1a4920b-c6bb-4e51-a317-8b899ba730e5\") " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.282044 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.282105 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.282583 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.293143 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8" (OuterVolumeSpecName: "kube-api-access-dzgj8") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "kube-api-access-dzgj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.294745 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info" (OuterVolumeSpecName: "pod-info") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.296542 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725" (OuterVolumeSpecName: "persistence") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "pvc-6ef934c1-7124-4ab2-87d2-139d17f35725". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.296834 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.307720 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf" (OuterVolumeSpecName: "server-conf") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.357682 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d1a4920b-c6bb-4e51-a317-8b899ba730e5" (UID: "d1a4920b-c6bb-4e51-a317-8b899ba730e5"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383398 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383474 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383512 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b9bd920-4f84-49f3-b731-eceb9244abd4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383615 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383642 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6pkc\" (UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-kube-api-access-f6pkc\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383696 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383724 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b9bd920-4f84-49f3-b731-eceb9244abd4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383773 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383788 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzgj8\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-kube-api-access-dzgj8\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383799 5081 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d1a4920b-c6bb-4e51-a317-8b899ba730e5-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383827 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") on node \"crc\" " Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383841 5081 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383854 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383867 5081 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d1a4920b-c6bb-4e51-a317-8b899ba730e5-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383879 5081 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/d1a4920b-c6bb-4e51-a317-8b899ba730e5-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.383889 5081 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d1a4920b-c6bb-4e51-a317-8b899ba730e5-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.403120 5081 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.403282 5081 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-6ef934c1-7124-4ab2-87d2-139d17f35725" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725") on node "crc" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.485759 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486253 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486271 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486328 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486356 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b9bd920-4f84-49f3-b731-eceb9244abd4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486390 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486418 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6pkc\" (UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-kube-api-access-f6pkc\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc 
kubenswrapper[5081]: I1003 16:48:31.486479 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486506 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b9bd920-4f84-49f3-b731-eceb9244abd4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486542 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486631 5081 reconciler_common.go:293] "Volume detached for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") on node \"crc\" DevicePath \"\"" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.486997 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0" Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.488357 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.488385 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b1beec300a485d9bc8c97144678f0cf0e39d898f5c098f9c99d44d3284c8beb5/globalmount\"" pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.488579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.488809 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9b9bd920-4f84-49f3-b731-eceb9244abd4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.490543 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9b9bd920-4f84-49f3-b731-eceb9244abd4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.490988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9b9bd920-4f84-49f3-b731-eceb9244abd4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.493169 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.506099 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6pkc\" (UniqueName: \"kubernetes.io/projected/9b9bd920-4f84-49f3-b731-eceb9244abd4-kube-api-access-f6pkc\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.522215 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-68cad1fb-4604-4276-a63d-468cac9ef2f9\") pod \"rabbitmq-server-0\" (UID: \"9b9bd920-4f84-49f3-b731-eceb9244abd4\") " pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.584376 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.740808 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-669d466995-flj8w"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.811073 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.811300 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="dnsmasq-dns" containerID="cri-o://14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465" gracePeriod=10
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.851010 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dabe8a70-65de-444d-b6f0-776ba7a0e7e9" path="/var/lib/kubelet/pods/dabe8a70-65de-444d-b6f0-776ba7a0e7e9/volumes"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.909689 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9"}
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.913057 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.913696 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d1a4920b-c6bb-4e51-a317-8b899ba730e5","Type":"ContainerDied","Data":"d693f04b54a424ff4ac9ec20ee732354bfc7f8fb1c147640cfb22544e21a7605"}
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.913754 5081 scope.go:117] "RemoveContainer" containerID="3bf3bd31f6d854bc8390ecdbb342f179be3855cb530918a50833de8f5bcb4924"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.956235 5081 scope.go:117] "RemoveContainer" containerID="37800ec81a808ad1e2341a5cb7784598c92f69fd5c9b9f2e3eb26b0ff24efc06"
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.959906 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.981717 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:48:31 crc kubenswrapper[5081]: I1003 16:48:31.992990 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.001655 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.003314 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.004922 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.005212 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.005393 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.005519 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.005760 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gdlt5"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.045971 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 16:48:32 crc kubenswrapper[5081]: W1003 16:48:32.065207 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b9bd920_4f84_49f3_b731_eceb9244abd4.slice/crio-5346db114a7892a95411c282d67c7320da9b7bd8ba2b9f3b80e9641435c6c57c WatchSource:0}: Error finding container 5346db114a7892a95411c282d67c7320da9b7bd8ba2b9f3b80e9641435c6c57c: Status 404 returned error can't find the container with id 5346db114a7892a95411c282d67c7320da9b7bd8ba2b9f3b80e9641435c6c57c
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100135 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100246 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6zmw\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-kube-api-access-x6zmw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100320 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e885ab7c-f947-4729-8711-a2142a7d2667-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100355 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100417 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100438 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.100487 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e885ab7c-f947-4729-8711-a2142a7d2667-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208222 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208362 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208385 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208411 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e885ab7c-f947-4729-8711-a2142a7d2667-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208442 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208496 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208538 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6zmw\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-kube-api-access-x6zmw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208593 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e885ab7c-f947-4729-8711-a2142a7d2667-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.208971 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.209618 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.210357 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e885ab7c-f947-4729-8711-a2142a7d2667-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.210875 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.214873 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e885ab7c-f947-4729-8711-a2142a7d2667-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.216844 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e885ab7c-f947-4729-8711-a2142a7d2667-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.219448 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.233037 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6zmw\" (UniqueName: \"kubernetes.io/projected/e885ab7c-f947-4729-8711-a2142a7d2667-kube-api-access-x6zmw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.240986 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.241067 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0cfbab479e9d0ebba3db0af50f6e969898bebde1ddd47ba8c253b86e70fc42b1/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.314515 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.411978 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc\") pod \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") "
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.412098 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config\") pod \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") "
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.412191 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn9wd\" (UniqueName: \"kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd\") pod \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\" (UID: \"4f3545ab-0602-43f1-9777-d7a7b58f1c2c\") "
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.416730 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd" (OuterVolumeSpecName: "kube-api-access-xn9wd") pod "4f3545ab-0602-43f1-9777-d7a7b58f1c2c" (UID: "4f3545ab-0602-43f1-9777-d7a7b58f1c2c"). InnerVolumeSpecName "kube-api-access-xn9wd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.416819 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ef934c1-7124-4ab2-87d2-139d17f35725\") pod \"rabbitmq-cell1-server-0\" (UID: \"e885ab7c-f947-4729-8711-a2142a7d2667\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.449102 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config" (OuterVolumeSpecName: "config") pod "4f3545ab-0602-43f1-9777-d7a7b58f1c2c" (UID: "4f3545ab-0602-43f1-9777-d7a7b58f1c2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.450539 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4f3545ab-0602-43f1-9777-d7a7b58f1c2c" (UID: "4f3545ab-0602-43f1-9777-d7a7b58f1c2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.514142 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-config\") on node \"crc\" DevicePath \"\""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.514195 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn9wd\" (UniqueName: \"kubernetes.io/projected/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-kube-api-access-xn9wd\") on node \"crc\" DevicePath \"\""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.514209 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f3545ab-0602-43f1-9777-d7a7b58f1c2c-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.633122 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.931686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b9bd920-4f84-49f3-b731-eceb9244abd4","Type":"ContainerStarted","Data":"5346db114a7892a95411c282d67c7320da9b7bd8ba2b9f3b80e9641435c6c57c"}
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.934672 5081 generic.go:334] "Generic (PLEG): container finished" podID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerID="14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465" exitCode=0
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.934717 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" event={"ID":"4f3545ab-0602-43f1-9777-d7a7b58f1c2c","Type":"ContainerDied","Data":"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"}
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.934770 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b77d44889-swrg8" event={"ID":"4f3545ab-0602-43f1-9777-d7a7b58f1c2c","Type":"ContainerDied","Data":"4912753ad589330d7db9a71c8804af9479c4efde0df45e624c6d0f399f0cb006"}
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.934779 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b77d44889-swrg8"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.934797 5081 scope.go:117] "RemoveContainer" containerID="14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.953732 5081 scope.go:117] "RemoveContainer" containerID="9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.970112 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.974858 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b77d44889-swrg8"]
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.993693 5081 scope.go:117] "RemoveContainer" containerID="14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"
Oct 03 16:48:32 crc kubenswrapper[5081]: E1003 16:48:32.994176 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465\": container with ID starting with 14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465 not found: ID does not exist" containerID="14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.994204 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465"} err="failed to get container status \"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465\": rpc error: code = NotFound desc = could not find container \"14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465\": container with ID starting with 14c5f85ee0035575a8c6a20c59eb7162876b38a983acfcd51969a67683ba2465 not found: ID does not exist"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.994225 5081 scope.go:117] "RemoveContainer" containerID="9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748"
Oct 03 16:48:32 crc kubenswrapper[5081]: E1003 16:48:32.994491 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748\": container with ID starting with 9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748 not found: ID does not exist" containerID="9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748"
Oct 03 16:48:32 crc kubenswrapper[5081]: I1003 16:48:32.994540 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748"} err="failed to get container status \"9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748\": rpc error: code = NotFound desc = could not find container \"9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748\": container with ID starting with 9428ca66c989f3265ed550daf5a89bbcacc4a88ec55e414d3312992333cf9748 not found: ID does not exist"
Oct 03 16:48:33 crc kubenswrapper[5081]: I1003 16:48:33.047489 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 16:48:33 crc kubenswrapper[5081]: I1003 16:48:33.837507 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" path="/var/lib/kubelet/pods/4f3545ab-0602-43f1-9777-d7a7b58f1c2c/volumes"
Oct 03 16:48:33 crc kubenswrapper[5081]: I1003 16:48:33.838704 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1a4920b-c6bb-4e51-a317-8b899ba730e5" path="/var/lib/kubelet/pods/d1a4920b-c6bb-4e51-a317-8b899ba730e5/volumes"
Oct 03 16:48:33 crc kubenswrapper[5081]: I1003 16:48:33.942294 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b9bd920-4f84-49f3-b731-eceb9244abd4","Type":"ContainerStarted","Data":"750da7d49a124a380471da3e770888870c7cdc518c3b9c0761596b6c73a8086e"}
Oct 03 16:48:33 crc kubenswrapper[5081]: I1003 16:48:33.944003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e885ab7c-f947-4729-8711-a2142a7d2667","Type":"ContainerStarted","Data":"7fc637ae52830c248ebcbe87ada3102f20f462c4a4be2cce2f267606040d3f6a"}
Oct 03 16:48:34 crc kubenswrapper[5081]: I1003 16:48:34.954164 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e885ab7c-f947-4729-8711-a2142a7d2667","Type":"ContainerStarted","Data":"d7d739787bbff1643ea0e25f1c905ea79b5f0c7ba91925e5dba7541142a47a24"}
Oct 03 16:49:06 crc kubenswrapper[5081]: I1003 16:49:06.175811 5081 generic.go:334] "Generic (PLEG): container finished" podID="9b9bd920-4f84-49f3-b731-eceb9244abd4" containerID="750da7d49a124a380471da3e770888870c7cdc518c3b9c0761596b6c73a8086e" exitCode=0
Oct 03 16:49:06 crc kubenswrapper[5081]: I1003 16:49:06.175879 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b9bd920-4f84-49f3-b731-eceb9244abd4","Type":"ContainerDied","Data":"750da7d49a124a380471da3e770888870c7cdc518c3b9c0761596b6c73a8086e"}
Oct 03 16:49:06 crc kubenswrapper[5081]: I1003 16:49:06.178013 5081 generic.go:334] "Generic (PLEG): container finished" podID="e885ab7c-f947-4729-8711-a2142a7d2667" containerID="d7d739787bbff1643ea0e25f1c905ea79b5f0c7ba91925e5dba7541142a47a24" exitCode=0
Oct 03 16:49:06 crc kubenswrapper[5081]: I1003 16:49:06.178053 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e885ab7c-f947-4729-8711-a2142a7d2667","Type":"ContainerDied","Data":"d7d739787bbff1643ea0e25f1c905ea79b5f0c7ba91925e5dba7541142a47a24"}
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.187362 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9b9bd920-4f84-49f3-b731-eceb9244abd4","Type":"ContainerStarted","Data":"207849ade442049c476b427bc12cfeea3330436b830687d4b075e943bc816562"}
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.187939 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.189682 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e885ab7c-f947-4729-8711-a2142a7d2667","Type":"ContainerStarted","Data":"2a71c16268ae2b87ced1925b1014222fc2bbba642245d3f3cff077d8ad3ba1c2"}
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.189908 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.212157 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.21213826 podStartE2EDuration="36.21213826s" podCreationTimestamp="2025-10-03 16:48:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:49:07.206324904 +0000 UTC m=+4866.171881527" watchObservedRunningTime="2025-10-03 16:49:07.21213826 +0000 UTC m=+4866.177694873"
Oct 03 16:49:07 crc kubenswrapper[5081]: I1003 16:49:07.231989 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.231973056 podStartE2EDuration="36.231973056s" podCreationTimestamp="2025-10-03 16:48:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:49:07.228855657 +0000 UTC m=+4866.194412290" watchObservedRunningTime="2025-10-03 16:49:07.231973056 +0000 UTC m=+4866.197529669"
Oct 03 16:49:21 crc kubenswrapper[5081]: I1003 16:49:21.589541 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Oct 03 16:49:22 crc kubenswrapper[5081]: I1003 16:49:22.636804 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.350485 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"]
Oct 03 16:49:31 crc kubenswrapper[5081]: E1003 16:49:31.351226 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="dnsmasq-dns"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.351239 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="dnsmasq-dns"
Oct 03 16:49:31 crc kubenswrapper[5081]: E1003 16:49:31.351273 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="init"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.351279 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="init"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.351418 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f3545ab-0602-43f1-9777-d7a7b58f1c2c" containerName="dnsmasq-dns"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.351950 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.354272 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-46z6g"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.357499 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.510306 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv7l9\" (UniqueName: \"kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9\") pod \"mariadb-client-1-default\" (UID: \"56f835b7-f677-40a8-be12-000bbb1e165b\") " pod="openstack/mariadb-client-1-default"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.612312 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv7l9\" (UniqueName: \"kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9\") pod \"mariadb-client-1-default\" (UID: \"56f835b7-f677-40a8-be12-000bbb1e165b\") " pod="openstack/mariadb-client-1-default"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.634452 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv7l9\" (UniqueName: \"kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9\") pod \"mariadb-client-1-default\" (UID: \"56f835b7-f677-40a8-be12-000bbb1e165b\") " pod="openstack/mariadb-client-1-default"
Oct 03 16:49:31 crc kubenswrapper[5081]: I1003 16:49:31.672142 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Oct 03 16:49:32 crc kubenswrapper[5081]: I1003 16:49:32.197036 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Oct 03 16:49:32 crc kubenswrapper[5081]: W1003 16:49:32.201718 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56f835b7_f677_40a8_be12_000bbb1e165b.slice/crio-b8bfd32ea892bb6c17949777c363236495470eaff6e9495403b0740bb33cdbbf WatchSource:0}: Error finding container b8bfd32ea892bb6c17949777c363236495470eaff6e9495403b0740bb33cdbbf: Status 404 returned error can't find the container with id b8bfd32ea892bb6c17949777c363236495470eaff6e9495403b0740bb33cdbbf
Oct 03 16:49:32 crc kubenswrapper[5081]: I1003 16:49:32.204705 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 16:49:32 crc kubenswrapper[5081]: I1003 16:49:32.375376 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"56f835b7-f677-40a8-be12-000bbb1e165b","Type":"ContainerStarted","Data":"b8bfd32ea892bb6c17949777c363236495470eaff6e9495403b0740bb33cdbbf"}
Oct 03 16:49:36 crc kubenswrapper[5081]: I1003 16:49:36.416127 5081 generic.go:334] "Generic (PLEG): container finished" podID="56f835b7-f677-40a8-be12-000bbb1e165b" containerID="0695746e8200792710fd2d8961ac1d1f88afaa8aed7aa46bf0a2788f4ad4af00" exitCode=0
Oct 03 16:49:36 crc kubenswrapper[5081]: I1003 16:49:36.416200 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"56f835b7-f677-40a8-be12-000bbb1e165b","Type":"ContainerDied","Data":"0695746e8200792710fd2d8961ac1d1f88afaa8aed7aa46bf0a2788f4ad4af00"}
Oct 03 16:49:37 crc kubenswrapper[5081]:
I1003 16:49:37.757965 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 03 16:49:37 crc kubenswrapper[5081]: I1003 16:49:37.786804 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_56f835b7-f677-40a8-be12-000bbb1e165b/mariadb-client-1-default/0.log" Oct 03 16:49:37 crc kubenswrapper[5081]: I1003 16:49:37.815057 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 03 16:49:37 crc kubenswrapper[5081]: I1003 16:49:37.820447 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 03 16:49:37 crc kubenswrapper[5081]: I1003 16:49:37.910060 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv7l9\" (UniqueName: \"kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9\") pod \"56f835b7-f677-40a8-be12-000bbb1e165b\" (UID: \"56f835b7-f677-40a8-be12-000bbb1e165b\") " Oct 03 16:49:37 crc kubenswrapper[5081]: I1003 16:49:37.915765 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9" (OuterVolumeSpecName: "kube-api-access-lv7l9") pod "56f835b7-f677-40a8-be12-000bbb1e165b" (UID: "56f835b7-f677-40a8-be12-000bbb1e165b"). InnerVolumeSpecName "kube-api-access-lv7l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.012040 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv7l9\" (UniqueName: \"kubernetes.io/projected/56f835b7-f677-40a8-be12-000bbb1e165b-kube-api-access-lv7l9\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.303737 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Oct 03 16:49:38 crc kubenswrapper[5081]: E1003 16:49:38.304602 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f835b7-f677-40a8-be12-000bbb1e165b" containerName="mariadb-client-1-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.304630 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f835b7-f677-40a8-be12-000bbb1e165b" containerName="mariadb-client-1-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.304869 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f835b7-f677-40a8-be12-000bbb1e165b" containerName="mariadb-client-1-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.305512 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.309675 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.417716 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf4v2\" (UniqueName: \"kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2\") pod \"mariadb-client-2-default\" (UID: \"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e\") " pod="openstack/mariadb-client-2-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.430463 5081 scope.go:117] "RemoveContainer" containerID="0695746e8200792710fd2d8961ac1d1f88afaa8aed7aa46bf0a2788f4ad4af00" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.430476 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.519088 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf4v2\" (UniqueName: \"kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2\") pod \"mariadb-client-2-default\" (UID: \"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e\") " pod="openstack/mariadb-client-2-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.535741 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf4v2\" (UniqueName: \"kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2\") pod \"mariadb-client-2-default\" (UID: \"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e\") " pod="openstack/mariadb-client-2-default" Oct 03 16:49:38 crc kubenswrapper[5081]: I1003 16:49:38.625982 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 03 16:49:39 crc kubenswrapper[5081]: I1003 16:49:39.091979 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 03 16:49:39 crc kubenswrapper[5081]: W1003 16:49:39.096579 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf34f2dcc_4ce0_4a5d_917d_7ab131264e5e.slice/crio-ea6afaad2433a1c67e897e24aff9bc3b6faf8fc4c4cea2b21cb05b16f9c6a689 WatchSource:0}: Error finding container ea6afaad2433a1c67e897e24aff9bc3b6faf8fc4c4cea2b21cb05b16f9c6a689: Status 404 returned error can't find the container with id ea6afaad2433a1c67e897e24aff9bc3b6faf8fc4c4cea2b21cb05b16f9c6a689 Oct 03 16:49:39 crc kubenswrapper[5081]: I1003 16:49:39.440128 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e","Type":"ContainerStarted","Data":"d6fe751e21ba5f507d7d512d31e22c954bbf6c73e8a1c18baffe0f65d31679fa"} Oct 03 16:49:39 crc kubenswrapper[5081]: I1003 16:49:39.440170 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e","Type":"ContainerStarted","Data":"ea6afaad2433a1c67e897e24aff9bc3b6faf8fc4c4cea2b21cb05b16f9c6a689"} Oct 03 16:49:39 crc kubenswrapper[5081]: I1003 16:49:39.459746 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.459722148 podStartE2EDuration="1.459722148s" podCreationTimestamp="2025-10-03 16:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:49:39.451347499 +0000 UTC m=+4898.416904122" watchObservedRunningTime="2025-10-03 16:49:39.459722148 +0000 UTC m=+4898.425278761" Oct 03 16:49:39 crc kubenswrapper[5081]: I1003 16:49:39.836877 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56f835b7-f677-40a8-be12-000bbb1e165b" path="/var/lib/kubelet/pods/56f835b7-f677-40a8-be12-000bbb1e165b/volumes" Oct 03 16:49:40 crc kubenswrapper[5081]: I1003 16:49:40.450464 5081 generic.go:334] "Generic (PLEG): container finished" podID="f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" containerID="d6fe751e21ba5f507d7d512d31e22c954bbf6c73e8a1c18baffe0f65d31679fa" exitCode=0 Oct 03 16:49:40 crc kubenswrapper[5081]: I1003 16:49:40.450518 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e","Type":"ContainerDied","Data":"d6fe751e21ba5f507d7d512d31e22c954bbf6c73e8a1c18baffe0f65d31679fa"} Oct 03 16:49:41 crc kubenswrapper[5081]: I1003 16:49:41.795551 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 03 16:49:41 crc kubenswrapper[5081]: I1003 16:49:41.841105 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 03 16:49:41 crc kubenswrapper[5081]: I1003 16:49:41.841142 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 03 16:49:41 crc kubenswrapper[5081]: I1003 16:49:41.969571 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf4v2\" (UniqueName: \"kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2\") pod \"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e\" (UID: \"f34f2dcc-4ce0-4a5d-917d-7ab131264e5e\") " Oct 03 16:49:41 crc kubenswrapper[5081]: I1003 16:49:41.974798 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2" (OuterVolumeSpecName: "kube-api-access-xf4v2") pod "f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" (UID: "f34f2dcc-4ce0-4a5d-917d-7ab131264e5e"). InnerVolumeSpecName "kube-api-access-xf4v2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.071785 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf4v2\" (UniqueName: \"kubernetes.io/projected/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e-kube-api-access-xf4v2\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.308703 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Oct 03 16:49:42 crc kubenswrapper[5081]: E1003 16:49:42.309088 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" containerName="mariadb-client-2-default" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.309112 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" containerName="mariadb-client-2-default" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.309586 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" containerName="mariadb-client-2-default" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.310287 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.315330 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.465524 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea6afaad2433a1c67e897e24aff9bc3b6faf8fc4c4cea2b21cb05b16f9c6a689" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.465598 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.477264 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvd8f\" (UniqueName: \"kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f\") pod \"mariadb-client-1\" (UID: \"21cdf69d-060a-49b9-aa53-2589fb6bff3f\") " pod="openstack/mariadb-client-1" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.578413 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvd8f\" (UniqueName: \"kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f\") pod \"mariadb-client-1\" (UID: \"21cdf69d-060a-49b9-aa53-2589fb6bff3f\") " pod="openstack/mariadb-client-1" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.594765 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvd8f\" (UniqueName: \"kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f\") pod \"mariadb-client-1\" (UID: \"21cdf69d-060a-49b9-aa53-2589fb6bff3f\") " pod="openstack/mariadb-client-1" Oct 03 16:49:42 crc kubenswrapper[5081]: I1003 16:49:42.627047 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 03 16:49:43 crc kubenswrapper[5081]: I1003 16:49:43.125692 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 03 16:49:43 crc kubenswrapper[5081]: I1003 16:49:43.473651 5081 generic.go:334] "Generic (PLEG): container finished" podID="21cdf69d-060a-49b9-aa53-2589fb6bff3f" containerID="9b79b6b559dae6cfa6ca0dbceaa271e91e15f297ed794e4922bc5651f05e28d9" exitCode=0 Oct 03 16:49:43 crc kubenswrapper[5081]: I1003 16:49:43.473694 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"21cdf69d-060a-49b9-aa53-2589fb6bff3f","Type":"ContainerDied","Data":"9b79b6b559dae6cfa6ca0dbceaa271e91e15f297ed794e4922bc5651f05e28d9"} Oct 03 16:49:43 crc kubenswrapper[5081]: I1003 16:49:43.473721 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"21cdf69d-060a-49b9-aa53-2589fb6bff3f","Type":"ContainerStarted","Data":"531b3af01d0abae38716a78b85bc33c56a10a7b255f4c3fe6a62b0e12b87c60f"} Oct 03 16:49:43 crc kubenswrapper[5081]: I1003 16:49:43.838679 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f34f2dcc-4ce0-4a5d-917d-7ab131264e5e" path="/var/lib/kubelet/pods/f34f2dcc-4ce0-4a5d-917d-7ab131264e5e/volumes" Oct 03 16:49:44 crc kubenswrapper[5081]: I1003 16:49:44.831794 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 03 16:49:44 crc kubenswrapper[5081]: I1003 16:49:44.848395 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_21cdf69d-060a-49b9-aa53-2589fb6bff3f/mariadb-client-1/0.log" Oct 03 16:49:44 crc kubenswrapper[5081]: I1003 16:49:44.873635 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Oct 03 16:49:44 crc kubenswrapper[5081]: I1003 16:49:44.880039 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.019909 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvd8f\" (UniqueName: \"kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f\") pod \"21cdf69d-060a-49b9-aa53-2589fb6bff3f\" (UID: \"21cdf69d-060a-49b9-aa53-2589fb6bff3f\") " Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.028151 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f" (OuterVolumeSpecName: "kube-api-access-rvd8f") pod "21cdf69d-060a-49b9-aa53-2589fb6bff3f" (UID: "21cdf69d-060a-49b9-aa53-2589fb6bff3f"). InnerVolumeSpecName "kube-api-access-rvd8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.121933 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvd8f\" (UniqueName: \"kubernetes.io/projected/21cdf69d-060a-49b9-aa53-2589fb6bff3f-kube-api-access-rvd8f\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.316749 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Oct 03 16:49:45 crc kubenswrapper[5081]: E1003 16:49:45.317505 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21cdf69d-060a-49b9-aa53-2589fb6bff3f" containerName="mariadb-client-1" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.317529 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="21cdf69d-060a-49b9-aa53-2589fb6bff3f" containerName="mariadb-client-1" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.317741 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="21cdf69d-060a-49b9-aa53-2589fb6bff3f" containerName="mariadb-client-1" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.318351 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.324147 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.426149 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbd6c\" (UniqueName: \"kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c\") pod \"mariadb-client-4-default\" (UID: \"9e799921-4495-4dc0-8e84-4da22950ee81\") " pod="openstack/mariadb-client-4-default" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.488583 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="531b3af01d0abae38716a78b85bc33c56a10a7b255f4c3fe6a62b0e12b87c60f" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.488628 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.527537 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbd6c\" (UniqueName: \"kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c\") pod \"mariadb-client-4-default\" (UID: \"9e799921-4495-4dc0-8e84-4da22950ee81\") " pod="openstack/mariadb-client-4-default" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.545380 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbd6c\" (UniqueName: \"kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c\") pod \"mariadb-client-4-default\" (UID: \"9e799921-4495-4dc0-8e84-4da22950ee81\") " pod="openstack/mariadb-client-4-default" Oct 03 16:49:45 crc kubenswrapper[5081]: E1003 16:49:45.613706 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21cdf69d_060a_49b9_aa53_2589fb6bff3f.slice/crio-531b3af01d0abae38716a78b85bc33c56a10a7b255f4c3fe6a62b0e12b87c60f\": RecentStats: unable to find data in memory cache]" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.638542 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 03 16:49:45 crc kubenswrapper[5081]: I1003 16:49:45.836976 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21cdf69d-060a-49b9-aa53-2589fb6bff3f" path="/var/lib/kubelet/pods/21cdf69d-060a-49b9-aa53-2589fb6bff3f/volumes" Oct 03 16:49:46 crc kubenswrapper[5081]: I1003 16:49:46.100987 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 03 16:49:46 crc kubenswrapper[5081]: W1003 16:49:46.107546 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e799921_4495_4dc0_8e84_4da22950ee81.slice/crio-c84b9fc2d0304c71a7e17843e652d5d31555c1ce12dd6d10d577119d72aa64a1 WatchSource:0}: Error finding container c84b9fc2d0304c71a7e17843e652d5d31555c1ce12dd6d10d577119d72aa64a1: Status 404 returned error can't find the container with id c84b9fc2d0304c71a7e17843e652d5d31555c1ce12dd6d10d577119d72aa64a1 Oct 03 16:49:46 crc kubenswrapper[5081]: I1003 16:49:46.497238 5081 generic.go:334] "Generic (PLEG): container finished" podID="9e799921-4495-4dc0-8e84-4da22950ee81" containerID="dd1c4a125aae040be25a4546b23759343c3611f57586216abf5fa075ebe10ec8" exitCode=0 Oct 03 16:49:46 crc kubenswrapper[5081]: I1003 16:49:46.497345 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"9e799921-4495-4dc0-8e84-4da22950ee81","Type":"ContainerDied","Data":"dd1c4a125aae040be25a4546b23759343c3611f57586216abf5fa075ebe10ec8"} Oct 03 16:49:46 crc kubenswrapper[5081]: I1003 16:49:46.497547 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"9e799921-4495-4dc0-8e84-4da22950ee81","Type":"ContainerStarted","Data":"c84b9fc2d0304c71a7e17843e652d5d31555c1ce12dd6d10d577119d72aa64a1"} Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.818897 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.840287 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_9e799921-4495-4dc0-8e84-4da22950ee81/mariadb-client-4-default/0.log" Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.865167 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.872300 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.963516 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbd6c\" (UniqueName: \"kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c\") pod \"9e799921-4495-4dc0-8e84-4da22950ee81\" (UID: \"9e799921-4495-4dc0-8e84-4da22950ee81\") " Oct 03 16:49:47 crc kubenswrapper[5081]: I1003 16:49:47.971475 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c" (OuterVolumeSpecName: "kube-api-access-bbd6c") pod "9e799921-4495-4dc0-8e84-4da22950ee81" (UID: "9e799921-4495-4dc0-8e84-4da22950ee81"). InnerVolumeSpecName "kube-api-access-bbd6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:48 crc kubenswrapper[5081]: I1003 16:49:48.066240 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbd6c\" (UniqueName: \"kubernetes.io/projected/9e799921-4495-4dc0-8e84-4da22950ee81-kube-api-access-bbd6c\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:48 crc kubenswrapper[5081]: I1003 16:49:48.515592 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c84b9fc2d0304c71a7e17843e652d5d31555c1ce12dd6d10d577119d72aa64a1" Oct 03 16:49:48 crc kubenswrapper[5081]: I1003 16:49:48.515673 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 03 16:49:49 crc kubenswrapper[5081]: I1003 16:49:49.842933 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e799921-4495-4dc0-8e84-4da22950ee81" path="/var/lib/kubelet/pods/9e799921-4495-4dc0-8e84-4da22950ee81/volumes" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.347971 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Oct 03 16:49:52 crc kubenswrapper[5081]: E1003 16:49:52.348546 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e799921-4495-4dc0-8e84-4da22950ee81" containerName="mariadb-client-4-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.348574 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e799921-4495-4dc0-8e84-4da22950ee81" containerName="mariadb-client-4-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.348742 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e799921-4495-4dc0-8e84-4da22950ee81" containerName="mariadb-client-4-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.349244 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.351202 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-46z6g" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.358152 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.529232 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svllt\" (UniqueName: \"kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt\") pod \"mariadb-client-5-default\" (UID: \"0580af97-e2cc-4326-b78d-a054cd6958e2\") " pod="openstack/mariadb-client-5-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.631142 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svllt\" (UniqueName: \"kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt\") pod \"mariadb-client-5-default\" (UID: \"0580af97-e2cc-4326-b78d-a054cd6958e2\") " pod="openstack/mariadb-client-5-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.650162 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svllt\" (UniqueName: \"kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt\") pod \"mariadb-client-5-default\" (UID: \"0580af97-e2cc-4326-b78d-a054cd6958e2\") " pod="openstack/mariadb-client-5-default" Oct 03 16:49:52 crc kubenswrapper[5081]: I1003 16:49:52.673303 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 03 16:49:53 crc kubenswrapper[5081]: W1003 16:49:53.160993 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0580af97_e2cc_4326_b78d_a054cd6958e2.slice/crio-9b531cf78725f53f2f27a3babc3fc0636f4f71cefdf49ea1286537432b06cc1a WatchSource:0}: Error finding container 9b531cf78725f53f2f27a3babc3fc0636f4f71cefdf49ea1286537432b06cc1a: Status 404 returned error can't find the container with id 9b531cf78725f53f2f27a3babc3fc0636f4f71cefdf49ea1286537432b06cc1a Oct 03 16:49:53 crc kubenswrapper[5081]: I1003 16:49:53.166550 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 03 16:49:53 crc kubenswrapper[5081]: I1003 16:49:53.555068 5081 generic.go:334] "Generic (PLEG): container finished" podID="0580af97-e2cc-4326-b78d-a054cd6958e2" containerID="c2822ebefc44ea8ec0a5744c377d4488bc9a0f449496837876135de32e9befbd" exitCode=0 Oct 03 16:49:53 crc kubenswrapper[5081]: I1003 16:49:53.555140 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"0580af97-e2cc-4326-b78d-a054cd6958e2","Type":"ContainerDied","Data":"c2822ebefc44ea8ec0a5744c377d4488bc9a0f449496837876135de32e9befbd"} Oct 03 16:49:53 crc kubenswrapper[5081]: I1003 16:49:53.555180 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"0580af97-e2cc-4326-b78d-a054cd6958e2","Type":"ContainerStarted","Data":"9b531cf78725f53f2f27a3babc3fc0636f4f71cefdf49ea1286537432b06cc1a"} Oct 03 16:49:54 crc kubenswrapper[5081]: I1003 16:49:54.909684 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 03 16:49:54 crc kubenswrapper[5081]: I1003 16:49:54.929690 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_0580af97-e2cc-4326-b78d-a054cd6958e2/mariadb-client-5-default/0.log" Oct 03 16:49:54 crc kubenswrapper[5081]: I1003 16:49:54.953293 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 03 16:49:54 crc kubenswrapper[5081]: I1003 16:49:54.958582 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.069674 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svllt\" (UniqueName: \"kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt\") pod \"0580af97-e2cc-4326-b78d-a054cd6958e2\" (UID: \"0580af97-e2cc-4326-b78d-a054cd6958e2\") " Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.076040 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt" (OuterVolumeSpecName: "kube-api-access-svllt") pod "0580af97-e2cc-4326-b78d-a054cd6958e2" (UID: "0580af97-e2cc-4326-b78d-a054cd6958e2"). InnerVolumeSpecName "kube-api-access-svllt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.104407 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Oct 03 16:49:55 crc kubenswrapper[5081]: E1003 16:49:55.104748 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0580af97-e2cc-4326-b78d-a054cd6958e2" containerName="mariadb-client-5-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.104766 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0580af97-e2cc-4326-b78d-a054cd6958e2" containerName="mariadb-client-5-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.104891 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0580af97-e2cc-4326-b78d-a054cd6958e2" containerName="mariadb-client-5-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.105438 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.114386 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.171265 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svllt\" (UniqueName: \"kubernetes.io/projected/0580af97-e2cc-4326-b78d-a054cd6958e2-kube-api-access-svllt\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.272216 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf57v\" (UniqueName: \"kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v\") pod \"mariadb-client-6-default\" (UID: \"6495f5bb-e9e9-40c9-b95c-0b113eee600e\") " pod="openstack/mariadb-client-6-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.375203 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf57v\" (UniqueName: \"kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v\") pod \"mariadb-client-6-default\" (UID: \"6495f5bb-e9e9-40c9-b95c-0b113eee600e\") " pod="openstack/mariadb-client-6-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.397384 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf57v\" (UniqueName: \"kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v\") pod \"mariadb-client-6-default\" (UID: \"6495f5bb-e9e9-40c9-b95c-0b113eee600e\") " pod="openstack/mariadb-client-6-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.428825 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.574008 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b531cf78725f53f2f27a3babc3fc0636f4f71cefdf49ea1286537432b06cc1a" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.574102 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.769934 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 03 16:49:55 crc kubenswrapper[5081]: W1003 16:49:55.773264 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6495f5bb_e9e9_40c9_b95c_0b113eee600e.slice/crio-c6817fd1bd94fcb848ed78bcf8b0b943f793a82e811cadcca3c8888ab6f3443a WatchSource:0}: Error finding container c6817fd1bd94fcb848ed78bcf8b0b943f793a82e811cadcca3c8888ab6f3443a: Status 404 returned error can't find the container with id c6817fd1bd94fcb848ed78bcf8b0b943f793a82e811cadcca3c8888ab6f3443a Oct 03 16:49:55 crc kubenswrapper[5081]: I1003 16:49:55.840281 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0580af97-e2cc-4326-b78d-a054cd6958e2" path="/var/lib/kubelet/pods/0580af97-e2cc-4326-b78d-a054cd6958e2/volumes" Oct 03 16:49:56 crc kubenswrapper[5081]: I1003 16:49:56.585520 5081 generic.go:334] "Generic (PLEG): container finished" podID="6495f5bb-e9e9-40c9-b95c-0b113eee600e" containerID="0abfcc406620572a36480c43d866e21ff39fd74b70efb796ea84ee18f858b2ad" exitCode=0 Oct 03 16:49:56 crc kubenswrapper[5081]: I1003 16:49:56.585605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"6495f5bb-e9e9-40c9-b95c-0b113eee600e","Type":"ContainerDied","Data":"0abfcc406620572a36480c43d866e21ff39fd74b70efb796ea84ee18f858b2ad"} Oct 03 16:49:56 crc kubenswrapper[5081]: I1003 16:49:56.585658 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"6495f5bb-e9e9-40c9-b95c-0b113eee600e","Type":"ContainerStarted","Data":"c6817fd1bd94fcb848ed78bcf8b0b943f793a82e811cadcca3c8888ab6f3443a"} Oct 03 16:49:57 crc kubenswrapper[5081]: I1003 16:49:57.945389 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 03 16:49:57 crc kubenswrapper[5081]: I1003 16:49:57.986730 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_6495f5bb-e9e9-40c9-b95c-0b113eee600e/mariadb-client-6-default/0.log" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.012126 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.019648 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.114792 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf57v\" (UniqueName: \"kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v\") pod \"6495f5bb-e9e9-40c9-b95c-0b113eee600e\" (UID: \"6495f5bb-e9e9-40c9-b95c-0b113eee600e\") " Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.121001 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v" (OuterVolumeSpecName: "kube-api-access-gf57v") pod "6495f5bb-e9e9-40c9-b95c-0b113eee600e" (UID: "6495f5bb-e9e9-40c9-b95c-0b113eee600e"). InnerVolumeSpecName "kube-api-access-gf57v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.131091 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Oct 03 16:49:58 crc kubenswrapper[5081]: E1003 16:49:58.131463 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6495f5bb-e9e9-40c9-b95c-0b113eee600e" containerName="mariadb-client-6-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.131479 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6495f5bb-e9e9-40c9-b95c-0b113eee600e" containerName="mariadb-client-6-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.131680 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6495f5bb-e9e9-40c9-b95c-0b113eee600e" containerName="mariadb-client-6-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.132291 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.147674 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.216656 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db9n6\" (UniqueName: \"kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6\") pod \"mariadb-client-7-default\" (UID: \"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa\") " pod="openstack/mariadb-client-7-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.216955 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf57v\" (UniqueName: \"kubernetes.io/projected/6495f5bb-e9e9-40c9-b95c-0b113eee600e-kube-api-access-gf57v\") on node \"crc\" DevicePath \"\"" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.318181 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db9n6\" (UniqueName: \"kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6\") pod \"mariadb-client-7-default\" (UID: \"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa\") " pod="openstack/mariadb-client-7-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.334594 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db9n6\" (UniqueName: \"kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6\") pod \"mariadb-client-7-default\" (UID: \"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa\") " pod="openstack/mariadb-client-7-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.467959 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.605839 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6817fd1bd94fcb848ed78bcf8b0b943f793a82e811cadcca3c8888ab6f3443a" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.605913 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 03 16:49:58 crc kubenswrapper[5081]: I1003 16:49:58.928784 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 03 16:49:59 crc kubenswrapper[5081]: I1003 16:49:59.616821 5081 generic.go:334] "Generic (PLEG): container finished" podID="e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" containerID="fb7662c62795ffb183937d9e10e81d9f16d4087ad2192706730a48d3b60b382a" exitCode=0 Oct 03 16:49:59 crc kubenswrapper[5081]: I1003 16:49:59.616869 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa","Type":"ContainerDied","Data":"fb7662c62795ffb183937d9e10e81d9f16d4087ad2192706730a48d3b60b382a"} Oct 03 16:49:59 crc kubenswrapper[5081]: I1003 16:49:59.616895 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa","Type":"ContainerStarted","Data":"40a8e479cb963aba3c5c9cdb1f660e3ea3ffa94c2f14047a3cf94ef662872046"} Oct 03 16:49:59 crc kubenswrapper[5081]: I1003 16:49:59.844284 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6495f5bb-e9e9-40c9-b95c-0b113eee600e" path="/var/lib/kubelet/pods/6495f5bb-e9e9-40c9-b95c-0b113eee600e/volumes" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.088133 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.112820 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa/mariadb-client-7-default/0.log" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.152405 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.159547 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.175177 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-db9n6\" (UniqueName: \"kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6\") pod \"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa\" (UID: \"e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa\") " Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.181278 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6" (OuterVolumeSpecName: "kube-api-access-db9n6") pod "e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" (UID: "e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa"). InnerVolumeSpecName "kube-api-access-db9n6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.277432 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.277846 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-db9n6\" (UniqueName: \"kubernetes.io/projected/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa-kube-api-access-db9n6\") on node \"crc\" DevicePath \"\"" Oct 03 16:50:01 crc kubenswrapper[5081]: E1003 16:50:01.278097 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" containerName="mariadb-client-7-default" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.278181 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" containerName="mariadb-client-7-default" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.278415 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" containerName="mariadb-client-7-default" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.279008 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.285507 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.379834 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znbvl\" (UniqueName: \"kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl\") pod \"mariadb-client-2\" (UID: \"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0\") " pod="openstack/mariadb-client-2" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.481237 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znbvl\" (UniqueName: \"kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl\") pod \"mariadb-client-2\" (UID: \"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0\") " pod="openstack/mariadb-client-2" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.498285 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znbvl\" (UniqueName: \"kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl\") pod \"mariadb-client-2\" (UID: \"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0\") " pod="openstack/mariadb-client-2" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.594641 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.644054 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40a8e479cb963aba3c5c9cdb1f660e3ea3ffa94c2f14047a3cf94ef662872046" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.644250 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 03 16:50:01 crc kubenswrapper[5081]: I1003 16:50:01.839036 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa" path="/var/lib/kubelet/pods/e8ea2f92-2fdf-40d3-a6f9-ec106ed8c5aa/volumes" Oct 03 16:50:02 crc kubenswrapper[5081]: I1003 16:50:02.088120 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 03 16:50:02 crc kubenswrapper[5081]: I1003 16:50:02.653020 5081 generic.go:334] "Generic (PLEG): container finished" podID="6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" containerID="c8ec31217d921cb957974cec2359bc4ac273937f97e8e7ba228115a04282ce01" exitCode=0 Oct 03 16:50:02 crc kubenswrapper[5081]: I1003 16:50:02.653065 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0","Type":"ContainerDied","Data":"c8ec31217d921cb957974cec2359bc4ac273937f97e8e7ba228115a04282ce01"} Oct 03 16:50:02 crc kubenswrapper[5081]: I1003 16:50:02.653337 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0","Type":"ContainerStarted","Data":"081c0db41c6f5263b0ecc76f9348df416f93df3be14e078c76718e2bac724562"} Oct 03 16:50:03 crc kubenswrapper[5081]: I1003 16:50:03.986800 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.004486 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0/mariadb-client-2/0.log" Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.030940 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.035817 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.133833 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znbvl\" (UniqueName: \"kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl\") pod \"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0\" (UID: \"6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0\") " Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.140971 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl" (OuterVolumeSpecName: "kube-api-access-znbvl") pod "6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" (UID: "6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0"). InnerVolumeSpecName "kube-api-access-znbvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.235129 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znbvl\" (UniqueName: \"kubernetes.io/projected/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0-kube-api-access-znbvl\") on node \"crc\" DevicePath \"\"" Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.668431 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="081c0db41c6f5263b0ecc76f9348df416f93df3be14e078c76718e2bac724562" Oct 03 16:50:04 crc kubenswrapper[5081]: I1003 16:50:04.668468 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Oct 03 16:50:05 crc kubenswrapper[5081]: I1003 16:50:05.836996 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" path="/var/lib/kubelet/pods/6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0/volumes" Oct 03 16:50:22 crc kubenswrapper[5081]: I1003 16:50:22.278070 5081 scope.go:117] "RemoveContainer" containerID="1ad392a8baeb0eaf84cc391e0ccee1a6bada9c8f709f7d2c12309dc5d9876832" Oct 03 16:50:30 crc kubenswrapper[5081]: I1003 16:50:30.647308 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:50:30 crc kubenswrapper[5081]: I1003 16:50:30.647846 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:51:00 crc kubenswrapper[5081]: I1003 16:51:00.648457 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:51:00 crc kubenswrapper[5081]: I1003 16:51:00.649634 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:51:30 crc kubenswrapper[5081]: I1003 16:51:30.647421 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:51:30 crc kubenswrapper[5081]: I1003 16:51:30.647907 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:51:30 crc kubenswrapper[5081]: I1003 16:51:30.647953 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 16:51:30 crc kubenswrapper[5081]: I1003 16:51:30.648469 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 16:51:30 crc kubenswrapper[5081]: I1003 16:51:30.648516 5081 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" gracePeriod=600 Oct 03 16:51:30 crc kubenswrapper[5081]: E1003 16:51:30.765796 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:51:31 crc kubenswrapper[5081]: I1003 16:51:31.306444 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" exitCode=0 Oct 03 16:51:31 crc kubenswrapper[5081]: I1003 16:51:31.306489 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9"} Oct 03 16:51:31 crc kubenswrapper[5081]: I1003 16:51:31.306526 5081 scope.go:117] "RemoveContainer" containerID="35faa1c942b89ebc4799ceb91808709fd7e10cbf620370c3e31f58d6810401b7" Oct 03 16:51:31 crc kubenswrapper[5081]: I1003 16:51:31.307074 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:51:31 crc kubenswrapper[5081]: E1003 16:51:31.307315 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:51:45 crc kubenswrapper[5081]: I1003 16:51:45.828045 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:51:45 crc kubenswrapper[5081]: E1003 16:51:45.829352 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:51:56 crc kubenswrapper[5081]: I1003 16:51:56.827444 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:51:56 crc kubenswrapper[5081]: E1003 16:51:56.828303 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:52:10 crc 
kubenswrapper[5081]: I1003 16:52:10.827891 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:52:10 crc kubenswrapper[5081]: E1003 16:52:10.828691 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:52:22 crc kubenswrapper[5081]: I1003 16:52:22.828249 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:52:22 crc kubenswrapper[5081]: E1003 16:52:22.829159 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:52:37 crc kubenswrapper[5081]: I1003 16:52:37.828200 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:52:37 crc kubenswrapper[5081]: E1003 16:52:37.829613 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:52:50 crc kubenswrapper[5081]: I1003 16:52:50.827123 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:52:50 crc kubenswrapper[5081]: E1003 16:52:50.827988 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:53:01 crc kubenswrapper[5081]: I1003 16:53:01.832642 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:53:01 crc kubenswrapper[5081]: E1003 16:53:01.833575 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:53:15 crc kubenswrapper[5081]: I1003 16:53:15.827389 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:53:15 crc 
kubenswrapper[5081]: E1003 16:53:15.828137 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.582478 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:20 crc kubenswrapper[5081]: E1003 16:53:20.584926 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" containerName="mariadb-client-2" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.584949 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" containerName="mariadb-client-2" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.585153 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f3ff5e9-8199-4e66-bbc1-83e45bdadbf0" containerName="mariadb-client-2" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.586442 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.590811 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.689001 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.689093 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9bcx\" (UniqueName: \"kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.689144 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.790097 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.790203 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " 
pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.790238 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9bcx\" (UniqueName: \"kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.790756 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.790915 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.812181 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9bcx\" (UniqueName: \"kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx\") pod \"redhat-operators-4ggtk\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:20 crc kubenswrapper[5081]: I1003 16:53:20.921488 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:21 crc kubenswrapper[5081]: I1003 16:53:21.370437 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:22 crc kubenswrapper[5081]: I1003 16:53:22.144736 5081 generic.go:334] "Generic (PLEG): container finished" podID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerID="e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559" exitCode=0 Oct 03 16:53:22 crc kubenswrapper[5081]: I1003 16:53:22.144793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerDied","Data":"e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559"} Oct 03 16:53:22 crc kubenswrapper[5081]: I1003 16:53:22.145044 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerStarted","Data":"3ccd0100dcf416a3b94c413551b537a6aa611debedf2b71ab320aba1df664a09"} Oct 03 16:53:23 crc kubenswrapper[5081]: I1003 16:53:23.155886 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerStarted","Data":"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195"} Oct 03 16:53:24 crc kubenswrapper[5081]: I1003 16:53:24.164853 5081 generic.go:334] "Generic (PLEG): container finished" podID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerID="311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195" exitCode=0 Oct 03 16:53:24 crc kubenswrapper[5081]: I1003 16:53:24.164962 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerDied","Data":"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195"} Oct 03 16:53:25 crc kubenswrapper[5081]: I1003 16:53:25.173657 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerStarted","Data":"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3"} Oct 03 16:53:25 crc kubenswrapper[5081]: I1003 16:53:25.199148 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4ggtk" podStartSLOduration=2.794185198 podStartE2EDuration="5.199126862s" podCreationTimestamp="2025-10-03 16:53:20 +0000 UTC" firstStartedPulling="2025-10-03 16:53:22.146380542 +0000 UTC m=+5121.111937155" lastFinishedPulling="2025-10-03 16:53:24.551322206 +0000 UTC m=+5123.516878819" observedRunningTime="2025-10-03 16:53:25.192377757 +0000 UTC m=+5124.157934390" watchObservedRunningTime="2025-10-03 16:53:25.199126862 +0000 UTC m=+5124.164683475" Oct 03 16:53:29 crc kubenswrapper[5081]: I1003 16:53:29.828312 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:53:29 crc kubenswrapper[5081]: E1003 16:53:29.829117 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:53:30 crc kubenswrapper[5081]: I1003 16:53:30.922619 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:30 crc kubenswrapper[5081]: I1003 16:53:30.923036 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:30 crc kubenswrapper[5081]: I1003 16:53:30.972355 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:31 crc kubenswrapper[5081]: I1003 16:53:31.252503 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:31 crc kubenswrapper[5081]: I1003 16:53:31.296347 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:33 crc kubenswrapper[5081]: I1003 16:53:33.227292 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4ggtk" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="registry-server" containerID="cri-o://b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3" gracePeriod=2 Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.145783 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.192084 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities\") pod \"e0e83760-414b-4c8a-b434-8ef6e39de480\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.192225 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9bcx\" (UniqueName: \"kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx\") pod \"e0e83760-414b-4c8a-b434-8ef6e39de480\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.192284 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content\") pod \"e0e83760-414b-4c8a-b434-8ef6e39de480\" (UID: \"e0e83760-414b-4c8a-b434-8ef6e39de480\") " Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.193097 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities" (OuterVolumeSpecName: "utilities") pod "e0e83760-414b-4c8a-b434-8ef6e39de480" (UID: "e0e83760-414b-4c8a-b434-8ef6e39de480"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.197245 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx" (OuterVolumeSpecName: "kube-api-access-f9bcx") pod "e0e83760-414b-4c8a-b434-8ef6e39de480" (UID: "e0e83760-414b-4c8a-b434-8ef6e39de480"). InnerVolumeSpecName "kube-api-access-f9bcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.237510 5081 generic.go:334] "Generic (PLEG): container finished" podID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerID="b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3" exitCode=0 Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.237573 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerDied","Data":"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3"} Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.237598 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4ggtk" event={"ID":"e0e83760-414b-4c8a-b434-8ef6e39de480","Type":"ContainerDied","Data":"3ccd0100dcf416a3b94c413551b537a6aa611debedf2b71ab320aba1df664a09"} Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.237615 5081 scope.go:117] "RemoveContainer" containerID="b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.237743 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4ggtk" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.257384 5081 scope.go:117] "RemoveContainer" containerID="311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.276491 5081 scope.go:117] "RemoveContainer" containerID="e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.293946 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.293974 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9bcx\" (UniqueName: \"kubernetes.io/projected/e0e83760-414b-4c8a-b434-8ef6e39de480-kube-api-access-f9bcx\") on node \"crc\" DevicePath \"\"" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.308760 5081 scope.go:117] "RemoveContainer" containerID="b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3" Oct 03 16:53:34 crc kubenswrapper[5081]: E1003 16:53:34.309442 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3\": container with ID starting with b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3 not found: ID does not exist" containerID="b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.309473 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3"} err="failed to get container status \"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3\": rpc error: code = NotFound desc = could not find container \"b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3\": container with ID starting with b46d2af3db97bb5c7dc80d7daa964159746e0db02d2580980e89715a5f1122a3 not found: ID does not exist" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.309500 5081 scope.go:117] "RemoveContainer" containerID="311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195" Oct 03 16:53:34 crc kubenswrapper[5081]: E1003 16:53:34.309766 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195\": container with ID starting with 311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195 not found: ID does not exist" containerID="311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.309797 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195"} err="failed to get container status \"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195\": rpc error: code = NotFound desc = could not find container \"311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195\": container with ID starting with 311597aa3378b09ef77ea077362d6ab289f986a6237a46d5ad06d96bf55e9195 not found: ID does not exist" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.309813 5081 scope.go:117] "RemoveContainer" 
containerID="e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559" Oct 03 16:53:34 crc kubenswrapper[5081]: E1003 16:53:34.310124 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559\": container with ID starting with e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559 not found: ID does not exist" containerID="e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559" Oct 03 16:53:34 crc kubenswrapper[5081]: I1003 16:53:34.310152 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559"} err="failed to get container status \"e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559\": rpc error: code = NotFound desc = could not find container \"e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559\": container with ID starting with e0d57c733314df9ed4e99c1f519c124f2149529eeb23d05662dfbdc14de15559 not found: ID does not exist" Oct 03 16:53:35 crc kubenswrapper[5081]: I1003 16:53:35.018004 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0e83760-414b-4c8a-b434-8ef6e39de480" (UID: "e0e83760-414b-4c8a-b434-8ef6e39de480"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:53:35 crc kubenswrapper[5081]: I1003 16:53:35.105405 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e83760-414b-4c8a-b434-8ef6e39de480-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:53:35 crc kubenswrapper[5081]: I1003 16:53:35.165483 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:35 crc kubenswrapper[5081]: I1003 16:53:35.170774 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4ggtk"] Oct 03 16:53:35 crc kubenswrapper[5081]: I1003 16:53:35.845139 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" path="/var/lib/kubelet/pods/e0e83760-414b-4c8a-b434-8ef6e39de480/volumes" Oct 03 16:53:44 crc kubenswrapper[5081]: I1003 16:53:44.827595 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:53:44 crc kubenswrapper[5081]: E1003 16:53:44.828115 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:53:55 crc kubenswrapper[5081]: I1003 16:53:55.828455 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:53:55 crc kubenswrapper[5081]: E1003 16:53:55.829405 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:07 crc kubenswrapper[5081]: I1003 16:54:07.827263 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:54:07 crc kubenswrapper[5081]: E1003 16:54:07.828233 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:21 crc kubenswrapper[5081]: I1003 16:54:21.832196 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:54:21 crc kubenswrapper[5081]: E1003 16:54:21.832913 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:32 crc kubenswrapper[5081]: I1003 16:54:32.828185 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:54:32 crc kubenswrapper[5081]: E1003 16:54:32.829162 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:46 crc kubenswrapper[5081]: I1003 16:54:46.828389 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:54:46 crc kubenswrapper[5081]: E1003 16:54:46.829262 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.976638 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Oct 03 16:54:48 crc kubenswrapper[5081]: E1003 16:54:48.977377 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="registry-server" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.977395 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="registry-server" Oct 03 16:54:48 crc kubenswrapper[5081]: E1003 16:54:48.977423 
5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="extract-content" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.977433 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="extract-content" Oct 03 16:54:48 crc kubenswrapper[5081]: E1003 16:54:48.977457 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="extract-utilities" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.977465 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="extract-utilities" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.977659 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0e83760-414b-4c8a-b434-8ef6e39de480" containerName="registry-server" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.978306 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.980724 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-46z6g" Oct 03 16:54:48 crc kubenswrapper[5081]: I1003 16:54:48.984580 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.049579 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfm7p\" (UniqueName: \"kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.049977 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.151150 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfm7p\" (UniqueName: \"kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.151255 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.154430 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.154501 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b28b738a8d17722521d08df983828865eb8bc72b9a1850a7640079d03030d06f/globalmount\"" pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.172116 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfm7p\" (UniqueName: \"kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.182430 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") pod \"mariadb-copy-data\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.296741 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.781383 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Oct 03 16:54:49 crc kubenswrapper[5081]: I1003 16:54:49.820653 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"21ae62aa-1b0a-4995-9562-54b29b558240","Type":"ContainerStarted","Data":"41d229f25761f383ca00bfec94fe4928e86638891e89c79489a5e9aaed3ca8bb"} Oct 03 16:54:50 crc kubenswrapper[5081]: I1003 16:54:50.830309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"21ae62aa-1b0a-4995-9562-54b29b558240","Type":"ContainerStarted","Data":"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d"} Oct 03 16:54:50 crc kubenswrapper[5081]: I1003 16:54:50.850527 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.850507466 podStartE2EDuration="3.850507466s" podCreationTimestamp="2025-10-03 16:54:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:54:50.842917428 +0000 UTC m=+5209.808474061" watchObservedRunningTime="2025-10-03 16:54:50.850507466 +0000 UTC m=+5209.816064079" Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.573880 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.575038 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.583294 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.602138 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bgf4\" (UniqueName: \"kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4\") pod \"mariadb-client\" (UID: \"384c0750-6d06-4067-a87b-a7d9643f2812\") " pod="openstack/mariadb-client" Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.703021 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bgf4\" (UniqueName: \"kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4\") pod \"mariadb-client\" (UID: \"384c0750-6d06-4067-a87b-a7d9643f2812\") " pod="openstack/mariadb-client" Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.721399 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bgf4\" (UniqueName: \"kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4\") pod \"mariadb-client\" (UID: \"384c0750-6d06-4067-a87b-a7d9643f2812\") " pod="openstack/mariadb-client" Oct 03 16:54:52 crc kubenswrapper[5081]: I1003 16:54:52.899277 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:53 crc kubenswrapper[5081]: I1003 16:54:53.335908 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:53 crc kubenswrapper[5081]: I1003 16:54:53.859541 5081 generic.go:334] "Generic (PLEG): container finished" podID="384c0750-6d06-4067-a87b-a7d9643f2812" containerID="02437d0faa0aecfb6b8963665eddbcd628bf80b9de3bcfe3a22a90e905094864" exitCode=0 Oct 03 16:54:53 crc kubenswrapper[5081]: I1003 16:54:53.859620 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"384c0750-6d06-4067-a87b-a7d9643f2812","Type":"ContainerDied","Data":"02437d0faa0aecfb6b8963665eddbcd628bf80b9de3bcfe3a22a90e905094864"} Oct 03 16:54:53 crc kubenswrapper[5081]: I1003 16:54:53.859882 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"384c0750-6d06-4067-a87b-a7d9643f2812","Type":"ContainerStarted","Data":"89037f7af87bcd8c35ad621087c58a9ea9ba7be3b3f3a87761970e9355f714d7"} Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.145025 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.165021 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_384c0750-6d06-4067-a87b-a7d9643f2812/mariadb-client/0.log" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.189095 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.194598 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.239443 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bgf4\" (UniqueName: \"kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4\") pod \"384c0750-6d06-4067-a87b-a7d9643f2812\" (UID: \"384c0750-6d06-4067-a87b-a7d9643f2812\") " Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.247984 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4" (OuterVolumeSpecName: "kube-api-access-6bgf4") pod "384c0750-6d06-4067-a87b-a7d9643f2812" (UID: "384c0750-6d06-4067-a87b-a7d9643f2812"). InnerVolumeSpecName "kube-api-access-6bgf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.313313 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:55 crc kubenswrapper[5081]: E1003 16:54:55.313677 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="384c0750-6d06-4067-a87b-a7d9643f2812" containerName="mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.313699 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="384c0750-6d06-4067-a87b-a7d9643f2812" containerName="mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.313847 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="384c0750-6d06-4067-a87b-a7d9643f2812" containerName="mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.314447 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.322661 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.342335 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7bsr\" (UniqueName: \"kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr\") pod \"mariadb-client\" (UID: \"22f12292-730a-42b5-882c-dfa65e576b7e\") " pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.342800 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bgf4\" (UniqueName: \"kubernetes.io/projected/384c0750-6d06-4067-a87b-a7d9643f2812-kube-api-access-6bgf4\") on node \"crc\" DevicePath \"\"" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.444526 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7bsr\" (UniqueName: \"kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr\") pod \"mariadb-client\" (UID: \"22f12292-730a-42b5-882c-dfa65e576b7e\") " pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.460767 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7bsr\" (UniqueName: \"kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr\") pod \"mariadb-client\" (UID: \"22f12292-730a-42b5-882c-dfa65e576b7e\") " pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.639794 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.835787 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="384c0750-6d06-4067-a87b-a7d9643f2812" path="/var/lib/kubelet/pods/384c0750-6d06-4067-a87b-a7d9643f2812/volumes" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.876404 5081 scope.go:117] "RemoveContainer" containerID="02437d0faa0aecfb6b8963665eddbcd628bf80b9de3bcfe3a22a90e905094864" Oct 03 16:54:55 crc kubenswrapper[5081]: I1003 16:54:55.876435 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:56 crc kubenswrapper[5081]: I1003 16:54:56.022370 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:56 crc kubenswrapper[5081]: I1003 16:54:56.885145 5081 generic.go:334] "Generic (PLEG): container finished" podID="22f12292-730a-42b5-882c-dfa65e576b7e" containerID="96fbb2c020509860763b0f28e3fed45f8510de4a0bcb71a092aac3bad6d6e217" exitCode=0 Oct 03 16:54:56 crc kubenswrapper[5081]: I1003 16:54:56.885258 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"22f12292-730a-42b5-882c-dfa65e576b7e","Type":"ContainerDied","Data":"96fbb2c020509860763b0f28e3fed45f8510de4a0bcb71a092aac3bad6d6e217"} Oct 03 16:54:56 crc kubenswrapper[5081]: I1003 16:54:56.885837 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"22f12292-730a-42b5-882c-dfa65e576b7e","Type":"ContainerStarted","Data":"306f735b8a72a0df234691437661e671fc37ff336054ceed1b614679e8b827b3"} Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.174160 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.187437 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7bsr\" (UniqueName: \"kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr\") pod \"22f12292-730a-42b5-882c-dfa65e576b7e\" (UID: \"22f12292-730a-42b5-882c-dfa65e576b7e\") " Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.194332 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_22f12292-730a-42b5-882c-dfa65e576b7e/mariadb-client/0.log" Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.194499 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr" (OuterVolumeSpecName: "kube-api-access-l7bsr") pod "22f12292-730a-42b5-882c-dfa65e576b7e" (UID: "22f12292-730a-42b5-882c-dfa65e576b7e"). InnerVolumeSpecName "kube-api-access-l7bsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.220175 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.227011 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.290275 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7bsr\" (UniqueName: \"kubernetes.io/projected/22f12292-730a-42b5-882c-dfa65e576b7e-kube-api-access-l7bsr\") on node \"crc\" DevicePath \"\"" Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.902745 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="306f735b8a72a0df234691437661e671fc37ff336054ceed1b614679e8b827b3" Oct 03 16:54:58 crc kubenswrapper[5081]: I1003 16:54:58.902818 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 03 16:54:59 crc kubenswrapper[5081]: I1003 16:54:59.827843 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:54:59 crc kubenswrapper[5081]: E1003 16:54:59.829320 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:54:59 crc kubenswrapper[5081]: I1003 16:54:59.836485 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f12292-730a-42b5-882c-dfa65e576b7e" path="/var/lib/kubelet/pods/22f12292-730a-42b5-882c-dfa65e576b7e/volumes" Oct 03 16:55:12 crc kubenswrapper[5081]: I1003 16:55:12.828121 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:55:12 crc kubenswrapper[5081]: E1003 16:55:12.828975 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:55:24 crc kubenswrapper[5081]: I1003 16:55:24.828384 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:55:24 crc kubenswrapper[5081]: E1003 16:55:24.829154 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.658680 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 16:55:29 crc kubenswrapper[5081]: E1003 16:55:29.659309 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f12292-730a-42b5-882c-dfa65e576b7e" containerName="mariadb-client" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.659325 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f12292-730a-42b5-882c-dfa65e576b7e" containerName="mariadb-client" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.659485 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f12292-730a-42b5-882c-dfa65e576b7e" containerName="mariadb-client" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.660380 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.662652 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.662825 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-wwl7n" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.662898 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.705170 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.715828 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.717585 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.727864 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.729693 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.741087 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.747068 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766423 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2vrh\" (UniqueName: \"kubernetes.io/projected/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-kube-api-access-q2vrh\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766471 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766524 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766621 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766649 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-config\") pod \"ovsdbserver-nb-0\" (UID: 
\"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.766675 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868434 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nxwx\" (UniqueName: \"kubernetes.io/projected/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-kube-api-access-7nxwx\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868467 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2vrh\" (UniqueName: \"kubernetes.io/projected/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-kube-api-access-q2vrh\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868492 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868521 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868550 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868582 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868609 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc 
kubenswrapper[5081]: I1003 16:55:29.868637 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868665 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-config\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868712 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868752 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868821 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-config\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868846 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-config\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868862 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868878 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8k7v\" (UniqueName: \"kubernetes.io/projected/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-kube-api-access-x8k7v\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.868899 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.869401 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.869924 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-config\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.869940 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.871496 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.871544 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9e7ddd22532d8296e60c6889749d38dc9fcf6745d54d6eaa37c5794120f06ae1/globalmount\"" pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.877047 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.891821 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2vrh\" (UniqueName: \"kubernetes.io/projected/c9ad8da2-f647-4cc9-ba8c-081118e9c54d-kube-api-access-q2vrh\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.893928 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.895706 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.906487 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.906592 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-wsswb" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.906505 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.912427 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.914391 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.922627 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.923991 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.932992 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.942228 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.942746 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e1584a92-68b8-42a0-8c37-87e5784b887e\") pod \"ovsdbserver-nb-0\" (UID: \"c9ad8da2-f647-4cc9-ba8c-081118e9c54d\") " pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.953115 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970357 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8k7v\" (UniqueName: \"kubernetes.io/projected/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-kube-api-access-x8k7v\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970423 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970466 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970507 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nxwx\" (UniqueName: \"kubernetes.io/projected/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-kube-api-access-7nxwx\") pod \"ovsdbserver-nb-2\" (UID: 
\"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970579 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970631 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970674 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhb9m\" (UniqueName: \"kubernetes.io/projected/7e83a75a-8fba-4c53-8e12-098062e659a8-kube-api-access-bhb9m\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970701 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970735 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970779 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7e83a75a-8fba-4c53-8e12-098062e659a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970796 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-config\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970856 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970872 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970890 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970953 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-config\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970969 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e83a75a-8fba-4c53-8e12-098062e659a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.970986 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.972182 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.972208 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-config\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.972224 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-config\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.972429 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.972581 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc 
kubenswrapper[5081]: I1003 16:55:29.973632 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.973749 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.974299 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.974326 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7a9664c5589701ca7e55d53da9af0e316cd1761d87eb75db44c59588de27d7f6/globalmount\"" pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.975214 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.975240 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/284b6e7ced157312facd0b3f0ee4006e039bdb0f40c2a1051a76d9dc2802ba5c/globalmount\"" pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.975549 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.992405 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.992740 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nxwx\" (UniqueName: \"kubernetes.io/projected/9ff15d64-ab68-41c5-9cac-bd3f5bc8b019-kube-api-access-7nxwx\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:29 crc kubenswrapper[5081]: I1003 16:55:29.993178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8k7v\" (UniqueName: \"kubernetes.io/projected/a9bc237e-7713-46e2-b1ee-aa2bdd9e724f-kube-api-access-x8k7v\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.006294 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-693302f6-ff5a-4bc1-9f47-ade138cc326d\") pod \"ovsdbserver-nb-1\" (UID: \"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f\") " pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.008900 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27594c6c-2160-4e25-b3c6-d81312b2e853\") pod \"ovsdbserver-nb-2\" (UID: \"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019\") " pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.043484 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.057678 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072214 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhb9m\" (UniqueName: \"kubernetes.io/projected/7e83a75a-8fba-4c53-8e12-098062e659a8-kube-api-access-bhb9m\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072272 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7e83a75a-8fba-4c53-8e12-098062e659a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072300 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072325 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-config\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072344 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f003a0c-17ad-436e-b8a6-9de8188717d1-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072364 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072379 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072400 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-956hg\" (UniqueName: \"kubernetes.io/projected/9afa3095-5432-4475-bd7a-cd4f76cac607-kube-api-access-956hg\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072424 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhg26\" (UniqueName: \"kubernetes.io/projected/3f003a0c-17ad-436e-b8a6-9de8188717d1-kube-api-access-jhg26\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc 
kubenswrapper[5081]: I1003 16:55:30.072440 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072466 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9afa3095-5432-4475-bd7a-cd4f76cac607-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072482 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e83a75a-8fba-4c53-8e12-098062e659a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072500 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072518 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072543 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f003a0c-17ad-436e-b8a6-9de8188717d1-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072582 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-config\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072605 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9afa3095-5432-4475-bd7a-cd4f76cac607-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072632 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.072903 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/7e83a75a-8fba-4c53-8e12-098062e659a8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.074003 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.074914 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e83a75a-8fba-4c53-8e12-098062e659a8-config\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.078797 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.078825 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e113fa678d741f48c883ff9c932b4532e5971efa06ae319f30cf7048ac365e14/globalmount\"" pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.079739 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e83a75a-8fba-4c53-8e12-098062e659a8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.092851 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhb9m\" (UniqueName: \"kubernetes.io/projected/7e83a75a-8fba-4c53-8e12-098062e659a8-kube-api-access-bhb9m\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.109788 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a6d7a2d0-f03a-4b9c-a18b-5e3efacf3f8b\") pod \"ovsdbserver-sb-0\" (UID: \"7e83a75a-8fba-4c53-8e12-098062e659a8\") " pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.173981 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174036 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-config\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc 
kubenswrapper[5081]: I1003 16:55:30.174062 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f003a0c-17ad-436e-b8a6-9de8188717d1-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174087 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-956hg\" (UniqueName: \"kubernetes.io/projected/9afa3095-5432-4475-bd7a-cd4f76cac607-kube-api-access-956hg\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174119 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhg26\" (UniqueName: \"kubernetes.io/projected/3f003a0c-17ad-436e-b8a6-9de8188717d1-kube-api-access-jhg26\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174140 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174176 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9afa3095-5432-4475-bd7a-cd4f76cac607-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174209 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174237 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f003a0c-17ad-436e-b8a6-9de8188717d1-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174264 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-config\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174291 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9afa3095-5432-4475-bd7a-cd4f76cac607-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.174333 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\") pod 
\"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.176298 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-config\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.177167 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f003a0c-17ad-436e-b8a6-9de8188717d1-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.177673 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9afa3095-5432-4475-bd7a-cd4f76cac607-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.178580 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-config\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.179303 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9afa3095-5432-4475-bd7a-cd4f76cac607-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.180655 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f003a0c-17ad-436e-b8a6-9de8188717d1-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.182650 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.182691 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f39a8ea3545830b4fdd4051bffb0105c3d6fa5eea74eb14a213ef958a0fb77bc/globalmount\"" pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.182925 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.182974 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f68e412d8620346fe2368355fa1b04fd6b8bd8f51bb0974a56bda638054bd5ff/globalmount\"" pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.183395 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f003a0c-17ad-436e-b8a6-9de8188717d1-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.191424 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9afa3095-5432-4475-bd7a-cd4f76cac607-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.195193 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhg26\" (UniqueName: \"kubernetes.io/projected/3f003a0c-17ad-436e-b8a6-9de8188717d1-kube-api-access-jhg26\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.196888 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-956hg\" (UniqueName: \"kubernetes.io/projected/9afa3095-5432-4475-bd7a-cd4f76cac607-kube-api-access-956hg\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.222411 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-14e1c821-b509-4be3-9f2b-a7265776d6b7\") pod \"ovsdbserver-sb-1\" (UID: \"3f003a0c-17ad-436e-b8a6-9de8188717d1\") " pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.223027 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f58c0b9f-7ad3-4a65-8ebb-ef03cf7d788a\") pod \"ovsdbserver-sb-2\" (UID: \"9afa3095-5432-4475-bd7a-cd4f76cac607\") " pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.350983 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.360671 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.389035 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.607625 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 16:55:30 crc kubenswrapper[5081]: I1003 16:55:30.676239 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 03 16:55:30 crc kubenswrapper[5081]: W1003 16:55:30.682086 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9bc237e_7713_46e2_b1ee_aa2bdd9e724f.slice/crio-8a6b9d1cee6fba07c5beace9a78661b736c976d1a61f6644cd1a4f26ffee8392 WatchSource:0}: Error finding container 8a6b9d1cee6fba07c5beace9a78661b736c976d1a61f6644cd1a4f26ffee8392: Status 404 returned error can't find the container with id 8a6b9d1cee6fba07c5beace9a78661b736c976d1a61f6644cd1a4f26ffee8392 Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.026758 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 16:55:31 crc kubenswrapper[5081]: W1003 16:55:31.029985 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e83a75a_8fba_4c53_8e12_098062e659a8.slice/crio-823a362ad411ff98622805626c2d7f7534dd5e9815a2d66d3064e78b2241012a WatchSource:0}: Error finding container 823a362ad411ff98622805626c2d7f7534dd5e9815a2d66d3064e78b2241012a: Status 404 returned error can't find the container with id 823a362ad411ff98622805626c2d7f7534dd5e9815a2d66d3064e78b2241012a Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.119660 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.169370 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"9afa3095-5432-4475-bd7a-cd4f76cac607","Type":"ContainerStarted","Data":"ab8af25cca544080ed9159b763aeebe8624f7ad2ba021346a060bdd52dd5344c"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.176844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c9ad8da2-f647-4cc9-ba8c-081118e9c54d","Type":"ContainerStarted","Data":"6664bef80700b8a1f4c6458c1fcf390a1befa4d75e8fc1bdb5efb0cb6a9ee34c"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.176892 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c9ad8da2-f647-4cc9-ba8c-081118e9c54d","Type":"ContainerStarted","Data":"af38454cc6bb2debfb18eaca8d173ec5956bd6c2531ae7033dc1f44f82e59be4"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.176901 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c9ad8da2-f647-4cc9-ba8c-081118e9c54d","Type":"ContainerStarted","Data":"ffa59acd5d363db06116320858f72fe066d42673db9faa984e8994392b261d18"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.182384 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7e83a75a-8fba-4c53-8e12-098062e659a8","Type":"ContainerStarted","Data":"823a362ad411ff98622805626c2d7f7534dd5e9815a2d66d3064e78b2241012a"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.184361 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" 
event={"ID":"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f","Type":"ContainerStarted","Data":"4333edb5518984fd2454d6ea5eebd64feafa76a576c72cdee1f169cca9bb3f01"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.184411 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f","Type":"ContainerStarted","Data":"36a8805b65643ee159a1902d70f268f8561afdbcef77c3f151c02c96d86d09a4"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.184428 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"a9bc237e-7713-46e2-b1ee-aa2bdd9e724f","Type":"ContainerStarted","Data":"8a6b9d1cee6fba07c5beace9a78661b736c976d1a61f6644cd1a4f26ffee8392"} Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.204477 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.204460317 podStartE2EDuration="3.204460317s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:31.202613624 +0000 UTC m=+5250.168170237" watchObservedRunningTime="2025-10-03 16:55:31.204460317 +0000 UTC m=+5250.170016930" Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.223310 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.223292999 podStartE2EDuration="3.223292999s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:31.221062725 +0000 UTC m=+5250.186619328" watchObservedRunningTime="2025-10-03 16:55:31.223292999 +0000 UTC m=+5250.188849612" Oct 03 16:55:31 crc kubenswrapper[5081]: I1003 16:55:31.251195 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.004587 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 03 16:55:32 crc kubenswrapper[5081]: W1003 16:55:32.009871 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f003a0c_17ad_436e_b8a6_9de8188717d1.slice/crio-e2f9fa321e0568d8eafb79d880d8d271937d7b4578dfe63fbb51a76d7c6f3b7a WatchSource:0}: Error finding container e2f9fa321e0568d8eafb79d880d8d271937d7b4578dfe63fbb51a76d7c6f3b7a: Status 404 returned error can't find the container with id e2f9fa321e0568d8eafb79d880d8d271937d7b4578dfe63fbb51a76d7c6f3b7a Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.194918 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7e83a75a-8fba-4c53-8e12-098062e659a8","Type":"ContainerStarted","Data":"dfd1295fa8a2f08b4a0e17edb5b3527b893139d3ffc7e63a56594c4fc44f3225"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.195297 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7e83a75a-8fba-4c53-8e12-098062e659a8","Type":"ContainerStarted","Data":"17c4dd04a6e4913885985ae3d67ad328530cac34defd1189fb87366f54185926"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.196827 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" 
event={"ID":"9afa3095-5432-4475-bd7a-cd4f76cac607","Type":"ContainerStarted","Data":"e292ffc3f7cc04b41e3d1dbea0496f711af91a4ecf6df9bb56d074e2103d5ca7"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.196865 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"9afa3095-5432-4475-bd7a-cd4f76cac607","Type":"ContainerStarted","Data":"63393d443ed981e7c505408ed809c89d1c0b4beec617eaab9ef9a80a5cd3857b"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.200196 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019","Type":"ContainerStarted","Data":"6b2db2b2342541f49842cbd871a581aec42b26482ee5aeaa8ccb119af96e3b44"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.200223 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019","Type":"ContainerStarted","Data":"5f1701b9e0fe12283ee81dbb9c47e30259f58de6b4789d4f18c35d087d8f0189"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.200232 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"9ff15d64-ab68-41c5-9cac-bd3f5bc8b019","Type":"ContainerStarted","Data":"ed54ff3e6bbec50580224b36e9c6fa0744503a77a39bb56ebb159598acf69853"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.202117 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3f003a0c-17ad-436e-b8a6-9de8188717d1","Type":"ContainerStarted","Data":"e2f9fa321e0568d8eafb79d880d8d271937d7b4578dfe63fbb51a76d7c6f3b7a"} Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.217861 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.217838789 podStartE2EDuration="4.217838789s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:32.209010775 +0000 UTC m=+5251.174567398" watchObservedRunningTime="2025-10-03 16:55:32.217838789 +0000 UTC m=+5251.183395402" Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.231520 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.231501322 podStartE2EDuration="4.231501322s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:32.227094355 +0000 UTC m=+5251.192650988" watchObservedRunningTime="2025-10-03 16:55:32.231501322 +0000 UTC m=+5251.197057935" Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.267275 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.267255071 podStartE2EDuration="4.267255071s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:32.244323461 +0000 UTC m=+5251.209880084" watchObservedRunningTime="2025-10-03 16:55:32.267255071 +0000 UTC m=+5251.232811694" Oct 03 16:55:32 crc kubenswrapper[5081]: I1003 16:55:32.993096 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 03 16:55:33 crc kubenswrapper[5081]: 
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.044650 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.058838 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.098026 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.211573 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3f003a0c-17ad-436e-b8a6-9de8188717d1","Type":"ContainerStarted","Data":"c298671d47a7222a1cf8906e569d2e2a86aa363bbe4792fbe033229817cc5245"}
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.211625 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3f003a0c-17ad-436e-b8a6-9de8188717d1","Type":"ContainerStarted","Data":"f46c92508866e4e9fed2108e55738ff24861a954736c63fb7e24554b1af769d1"}
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.212211 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.247496 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=5.247478979 podStartE2EDuration="5.247478979s" podCreationTimestamp="2025-10-03 16:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:33.238254664 +0000 UTC m=+5252.203811277" watchObservedRunningTime="2025-10-03 16:55:33.247478979 +0000 UTC m=+5252.213035592"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.351504 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.361196 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2"
Oct 03 16:55:33 crc kubenswrapper[5081]: I1003 16:55:33.390682 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1"
Oct 03 16:55:34 crc kubenswrapper[5081]: I1003 16:55:34.993314 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.044052 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2"
Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.099234 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.351229 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.356845 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"]
Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.362298 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2"
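[Editor's note] The SyncLoop (probe) records above show the usual probe ordering: each startup probe reports "unhealthy" until the ovsdb server answers, then flips to "started"; only after that does the readiness probe begin running, which is why readiness first appears with status="" (no result yet) before becoming "ready". A compact sketch of that gating, with hypothetical types rather than kubelet source:

package main

import "fmt"

type probeResult string

const (
	unknown   probeResult = "" // not probed yet, as in status=""
	unhealthy probeResult = "unhealthy"
	started   probeResult = "started"
	ready     probeResult = "ready"
)

type pod struct {
	startup   probeResult
	readiness probeResult
}

// tick runs one probe cycle; checkOK is the container's actual health check.
func (p *pod) tick(checkOK bool) {
	if p.startup != started {
		if checkOK {
			p.startup = started
		} else {
			p.startup = unhealthy
		}
		return // readiness is not evaluated until startup has succeeded
	}
	if checkOK {
		p.readiness = ready
	}
}

func main() {
	p := &pod{startup: unknown, readiness: unknown}
	p.tick(false) // startup probe fails first: status="unhealthy"
	p.tick(true)  // startup succeeds: status="started"; readiness still ""
	p.tick(true)  // readiness probe now runs: status="ready"
	fmt.Println(p.startup, p.readiness)
}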
Need to start a new one" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.366031 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.374151 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"] Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.390245 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.480099 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.480186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgbrk\" (UniqueName: \"kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.480346 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.480373 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.583465 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgbrk\" (UniqueName: \"kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.583610 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.583644 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.583714 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.584485 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.589426 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.589501 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.614659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgbrk\" (UniqueName: \"kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk\") pod \"dnsmasq-dns-66668cf647-jvjt6\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") " pod="openstack/dnsmasq-dns-66668cf647-jvjt6" Oct 03 16:55:35 crc kubenswrapper[5081]: I1003 16:55:35.690912 5081 util.go:30] "No sandbox for pod can be found. 
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.032026 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.072917 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.084312 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.126516 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"]
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.238101 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" event={"ID":"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3","Type":"ContainerStarted","Data":"697075395f2379c5ce8b6fc46762b30ffdad45177210a3ceb2d615cf6e713c00"}
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.281922 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.390763 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.408544 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.441874 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.445243 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.488766 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.762266 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"]
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.788144 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"]
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.789583 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.792398 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.807389 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"]
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.910951 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc5vg\" (UniqueName: \"kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.910997 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.911098 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.911199 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:36 crc kubenswrapper[5081]: I1003 16:55:36.911225 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.012180 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc5vg\" (UniqueName: \"kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.012224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.012276 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.012340 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.012363 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.013294 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.013491 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.013535 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.013539 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.028947 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc5vg\" (UniqueName: \"kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg\") pod \"dnsmasq-dns-5d76bf78b7-hmd68\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.109394 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.261772 5081 generic.go:334] "Generic (PLEG): container finished" podID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerID="4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b" exitCode=0
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.261837 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" event={"ID":"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3","Type":"ContainerDied","Data":"4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b"}
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.311892 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1"
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.570254 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"]
Oct 03 16:55:37 crc kubenswrapper[5081]: I1003 16:55:37.827927 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9"
Oct 03 16:55:37 crc kubenswrapper[5081]: E1003 16:55:37.828492 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.270594 5081 generic.go:334] "Generic (PLEG): container finished" podID="91a96197-77e4-4185-8390-3a492352ae24" containerID="a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d" exitCode=0
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.270674 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" event={"ID":"91a96197-77e4-4185-8390-3a492352ae24","Type":"ContainerDied","Data":"a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d"}
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.270712 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" event={"ID":"91a96197-77e4-4185-8390-3a492352ae24","Type":"ContainerStarted","Data":"62abd556518126a46543973350440d3d6e89cf1418b972cc9a219d95c9d6da00"}
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.275037 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="dnsmasq-dns" containerID="cri-o://990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f" gracePeriod=10
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.275126 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" event={"ID":"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3","Type":"ContainerStarted","Data":"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"}
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.275164 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66668cf647-jvjt6"
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.315129 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" podStartSLOduration=3.3151085 podStartE2EDuration="3.3151085s" podCreationTimestamp="2025-10-03 16:55:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:38.306982026 +0000 UTC m=+5257.272538659" watchObservedRunningTime="2025-10-03 16:55:38.3151085 +0000 UTC m=+5257.280665113"
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.691618 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66668cf647-jvjt6"
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.739299 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc\") pod \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") "
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.739352 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgbrk\" (UniqueName: \"kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk\") pod \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") "
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.739379 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config\") pod \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") "
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.739415 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb\") pod \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\" (UID: \"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3\") "
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.747326 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk" (OuterVolumeSpecName: "kube-api-access-dgbrk") pod "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" (UID: "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3"). InnerVolumeSpecName "kube-api-access-dgbrk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.805813 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" (UID: "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.816467 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config" (OuterVolumeSpecName: "config") pod "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" (UID: "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.834209 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" (UID: "afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.840173 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.840202 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgbrk\" (UniqueName: \"kubernetes.io/projected/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-kube-api-access-dgbrk\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.840213 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-config\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:38 crc kubenswrapper[5081]: I1003 16:55:38.840221 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.285137 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" event={"ID":"91a96197-77e4-4185-8390-3a492352ae24","Type":"ContainerStarted","Data":"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0"}
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.285262 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.287514 5081 generic.go:334] "Generic (PLEG): container finished" podID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerID="990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f" exitCode=0
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.287546 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" event={"ID":"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3","Type":"ContainerDied","Data":"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"}
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.287578 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66668cf647-jvjt6" event={"ID":"afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3","Type":"ContainerDied","Data":"697075395f2379c5ce8b6fc46762b30ffdad45177210a3ceb2d615cf6e713c00"}
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.287611 5081 scope.go:117] "RemoveContainer" containerID="990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.287778 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66668cf647-jvjt6"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.307157 5081 scope.go:117] "RemoveContainer" containerID="4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.309346 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" podStartSLOduration=3.309336481 podStartE2EDuration="3.309336481s" podCreationTimestamp="2025-10-03 16:55:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:39.306656304 +0000 UTC m=+5258.272212927" watchObservedRunningTime="2025-10-03 16:55:39.309336481 +0000 UTC m=+5258.274893094"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.322504 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"]
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.339968 5081 scope.go:117] "RemoveContainer" containerID="990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.340307 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66668cf647-jvjt6"]
Oct 03 16:55:39 crc kubenswrapper[5081]: E1003 16:55:39.340533 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f\": container with ID starting with 990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f not found: ID does not exist" containerID="990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.340626 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f"} err="failed to get container status \"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f\": rpc error: code = NotFound desc = could not find container \"990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f\": container with ID starting with 990c1aa81da0c84bc6e33a921eef9afccabd99e469c8c4bfb088c63b4244815f not found: ID does not exist"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.340669 5081 scope.go:117] "RemoveContainer" containerID="4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b"
Oct 03 16:55:39 crc kubenswrapper[5081]: E1003 16:55:39.341052 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b\": container with ID starting with 4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b not found: ID does not exist" containerID="4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.341091 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b"} err="failed to get container status \"4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b\": rpc error: code = NotFound desc = could not find container \"4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b\": container with ID starting with 4a63c50f96d84738982fbb13d8b04be0f9e25c4a793d2b4a70557f119d52393b not found: ID does not exist"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.851224 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" path="/var/lib/kubelet/pods/afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3/volumes"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.863367 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 16:55:39 crc kubenswrapper[5081]: E1003 16:55:39.864141 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="dnsmasq-dns"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.864167 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="dnsmasq-dns"
Oct 03 16:55:39 crc kubenswrapper[5081]: E1003 16:55:39.864184 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="init"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.864191 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="init"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.864362 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe8e9ae-3a89-4cd8-9772-9ca9045f7ab3" containerName="dnsmasq-dns"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.864974 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.866093 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Oct 03 16:55:39 crc kubenswrapper[5081]: I1003 16:55:39.874192 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.064310 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.064977 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.065096 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptdx6\" (UniqueName: \"kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.166544 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptdx6\" (UniqueName: \"kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.166708 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.166890 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.170218 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.170262 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0a35c7fec90f83349e9d0e6ac592f79790b53a57ab9e46e0f1f992467dc5d268/globalmount\"" pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.177597 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.188009 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptdx6\" (UniqueName: \"kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.214412 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") pod \"ovn-copy-data\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") " pod="openstack/ovn-copy-data"
Oct 03 16:55:40 crc kubenswrapper[5081]: I1003 16:55:40.488479 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 03 16:55:41 crc kubenswrapper[5081]: I1003 16:55:41.049224 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 16:55:41 crc kubenswrapper[5081]: I1003 16:55:41.064210 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 16:55:41 crc kubenswrapper[5081]: I1003 16:55:41.306831 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"bbd48186-ddc4-4dde-a445-a701bf8e3631","Type":"ContainerStarted","Data":"84c87876424e7eaadaa0c15f7d37d4fe3d1463761b537a99b848918feda47acf"}
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.159149 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qnfjm"]
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.161604 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.180177 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnfjm"]
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.216099 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.216146 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.216247 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqsdx\" (UniqueName: \"kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.319628 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqsdx\" (UniqueName: \"kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.319691 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.319714 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.320605 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.320913 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.372041 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqsdx\" (UniqueName: \"kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx\") pod \"certified-operators-qnfjm\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:42 crc kubenswrapper[5081]: I1003 16:55:42.516471 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:43 crc kubenswrapper[5081]: I1003 16:55:43.051237 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnfjm"]
Oct 03 16:55:44 crc kubenswrapper[5081]: I1003 16:55:44.336413 5081 generic.go:334] "Generic (PLEG): container finished" podID="4197775b-e9a6-4035-bede-e430af2f35ef" containerID="b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2" exitCode=0
Oct 03 16:55:44 crc kubenswrapper[5081]: I1003 16:55:44.336485 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerDied","Data":"b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2"}
Oct 03 16:55:44 crc kubenswrapper[5081]: I1003 16:55:44.336817 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerStarted","Data":"2f6afc22eb385a9fb850a774cde6d8294916843f9939958ff081139584fb9197"}
Oct 03 16:55:44 crc kubenswrapper[5081]: I1003 16:55:44.338536 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"bbd48186-ddc4-4dde-a445-a701bf8e3631","Type":"ContainerStarted","Data":"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"}
Oct 03 16:55:44 crc kubenswrapper[5081]: I1003 16:55:44.372363 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.3640488 podStartE2EDuration="6.37234704s" podCreationTimestamp="2025-10-03 16:55:38 +0000 UTC" firstStartedPulling="2025-10-03 16:55:41.063960007 +0000 UTC m=+5260.029516620" lastFinishedPulling="2025-10-03 16:55:44.072258247 +0000 UTC m=+5263.037814860" observedRunningTime="2025-10-03 16:55:44.365131402 +0000 UTC m=+5263.330688025" watchObservedRunningTime="2025-10-03 16:55:44.37234704 +0000 UTC m=+5263.337903663"
Oct 03 16:55:45 crc kubenswrapper[5081]: I1003 16:55:45.348120 5081 generic.go:334] "Generic (PLEG): container finished" podID="4197775b-e9a6-4035-bede-e430af2f35ef" containerID="615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199" exitCode=0
Oct 03 16:55:45 crc kubenswrapper[5081]: I1003 16:55:45.348164 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerDied","Data":"615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199"}
Oct 03 16:55:46 crc kubenswrapper[5081]: I1003 16:55:46.359136 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerStarted","Data":"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06"}
Oct 03 16:55:46 crc kubenswrapper[5081]: I1003 16:55:46.374964 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qnfjm" podStartSLOduration=2.8731671260000002 podStartE2EDuration="4.374947548s" podCreationTimestamp="2025-10-03 16:55:42 +0000 UTC" firstStartedPulling="2025-10-03 16:55:44.338676001 +0000 UTC m=+5263.304232614" lastFinishedPulling="2025-10-03 16:55:45.840456403 +0000 UTC m=+5264.806013036" observedRunningTime="2025-10-03 16:55:46.37432036 +0000 UTC m=+5265.339877003" watchObservedRunningTime="2025-10-03 16:55:46.374947548 +0000 UTC m=+5265.340504161"
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.110768 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68"
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.174631 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"]
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.175004 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-669d466995-flj8w" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="dnsmasq-dns" containerID="cri-o://2c57c7c17665904a0af5bc48e5aa7f368341c5baf8297af7b8d67d4f96c2b18e" gracePeriod=10
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.369155 5081 generic.go:334] "Generic (PLEG): container finished" podID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerID="2c57c7c17665904a0af5bc48e5aa7f368341c5baf8297af7b8d67d4f96c2b18e" exitCode=0
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.369246 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d466995-flj8w" event={"ID":"bba4d832-4690-4a31-a7c3-9b2139cbeb2f","Type":"ContainerDied","Data":"2c57c7c17665904a0af5bc48e5aa7f368341c5baf8297af7b8d67d4f96c2b18e"}
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.639429 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d466995-flj8w"
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.813509 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config\") pod \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") "
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.813725 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dckz5\" (UniqueName: \"kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5\") pod \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") "
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.813770 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc\") pod \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\" (UID: \"bba4d832-4690-4a31-a7c3-9b2139cbeb2f\") "
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.821317 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5" (OuterVolumeSpecName: "kube-api-access-dckz5") pod "bba4d832-4690-4a31-a7c3-9b2139cbeb2f" (UID: "bba4d832-4690-4a31-a7c3-9b2139cbeb2f"). InnerVolumeSpecName "kube-api-access-dckz5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.854381 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config" (OuterVolumeSpecName: "config") pod "bba4d832-4690-4a31-a7c3-9b2139cbeb2f" (UID: "bba4d832-4690-4a31-a7c3-9b2139cbeb2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.863354 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bba4d832-4690-4a31-a7c3-9b2139cbeb2f" (UID: "bba4d832-4690-4a31-a7c3-9b2139cbeb2f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.915685 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dckz5\" (UniqueName: \"kubernetes.io/projected/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-kube-api-access-dckz5\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.915715 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:47 crc kubenswrapper[5081]: I1003 16:55:47.915727 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bba4d832-4690-4a31-a7c3-9b2139cbeb2f-config\") on node \"crc\" DevicePath \"\""
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.379059 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669d466995-flj8w" event={"ID":"bba4d832-4690-4a31-a7c3-9b2139cbeb2f","Type":"ContainerDied","Data":"4117842db802491b321f907623f6f4a7b9a574a2583869e3275d45a0654afa7a"}
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.379428 5081 scope.go:117] "RemoveContainer" containerID="2c57c7c17665904a0af5bc48e5aa7f368341c5baf8297af7b8d67d4f96c2b18e"
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.379146 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669d466995-flj8w"
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.409948 5081 scope.go:117] "RemoveContainer" containerID="362159724da2618ab4905b738878bacb754ce61aab453b7c01ba39e37548d10b"
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.417229 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"]
Oct 03 16:55:48 crc kubenswrapper[5081]: I1003 16:55:48.424173 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-669d466995-flj8w"]
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.245260 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 16:55:49 crc kubenswrapper[5081]: E1003 16:55:49.245609 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="init"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.245624 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="init"
Oct 03 16:55:49 crc kubenswrapper[5081]: E1003 16:55:49.245650 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="dnsmasq-dns"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.245657 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="dnsmasq-dns"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.245825 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" containerName="dnsmasq-dns"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.246759 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.249253 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.249355 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.250717 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jtqhg"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.260736 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.344906 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-scripts\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.344961 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38891ae7-ca75-4e26-817f-b3594f81aa8b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.345057 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38891ae7-ca75-4e26-817f-b3594f81aa8b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.345097 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88tn8\" (UniqueName: \"kubernetes.io/projected/38891ae7-ca75-4e26-817f-b3594f81aa8b-kube-api-access-88tn8\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.345127 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-config\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447047 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38891ae7-ca75-4e26-817f-b3594f81aa8b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447341 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88tn8\" (UniqueName: \"kubernetes.io/projected/38891ae7-ca75-4e26-817f-b3594f81aa8b-kube-api-access-88tn8\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447380 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-config\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447420 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-scripts\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447439 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38891ae7-ca75-4e26-817f-b3594f81aa8b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.447528 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38891ae7-ca75-4e26-817f-b3594f81aa8b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.448484 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-config\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.448681 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38891ae7-ca75-4e26-817f-b3594f81aa8b-scripts\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.457972 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38891ae7-ca75-4e26-817f-b3594f81aa8b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.466382 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88tn8\" (UniqueName: \"kubernetes.io/projected/38891ae7-ca75-4e26-817f-b3594f81aa8b-kube-api-access-88tn8\") pod \"ovn-northd-0\" (UID: \"38891ae7-ca75-4e26-817f-b3594f81aa8b\") " pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.571645 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 03 16:55:49 crc kubenswrapper[5081]: I1003 16:55:49.837143 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bba4d832-4690-4a31-a7c3-9b2139cbeb2f" path="/var/lib/kubelet/pods/bba4d832-4690-4a31-a7c3-9b2139cbeb2f/volumes"
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.004774 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 16:55:50 crc kubenswrapper[5081]: W1003 16:55:50.007972 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38891ae7_ca75_4e26_817f_b3594f81aa8b.slice/crio-6537324890cefeb0eb50b7959c4542272b0e5cb2ae9c38a920a6a0339e29d750 WatchSource:0}: Error finding container 6537324890cefeb0eb50b7959c4542272b0e5cb2ae9c38a920a6a0339e29d750: Status 404 returned error can't find the container with id 6537324890cefeb0eb50b7959c4542272b0e5cb2ae9c38a920a6a0339e29d750
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.403209 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38891ae7-ca75-4e26-817f-b3594f81aa8b","Type":"ContainerStarted","Data":"418359ec72e20c3d971c2f05edfbfb3ac81ebb7f60c7a0aef83a8f0a45baa965"}
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.403543 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38891ae7-ca75-4e26-817f-b3594f81aa8b","Type":"ContainerStarted","Data":"648273db69fc5062678a4269e493c21c524bda6557f8f2aa4da2e586a4593857"}
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.403553 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38891ae7-ca75-4e26-817f-b3594f81aa8b","Type":"ContainerStarted","Data":"6537324890cefeb0eb50b7959c4542272b0e5cb2ae9c38a920a6a0339e29d750"}
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.404647 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 03 16:55:50 crc kubenswrapper[5081]: I1003 16:55:50.428187 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.428165678 podStartE2EDuration="1.428165678s" podCreationTimestamp="2025-10-03 16:55:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:55:50.422622128 +0000 UTC m=+5269.388178761" watchObservedRunningTime="2025-10-03 16:55:50.428165678 +0000 UTC m=+5269.393722311"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.280510 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hljwl"]
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.282266 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.298747 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hljwl"]
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.377791 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6tzv\" (UniqueName: \"kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.377855 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.377947 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.479353 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.479490 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6tzv\" (UniqueName: \"kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.479519 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.480003 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.480421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.505108 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6tzv\" (UniqueName: \"kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv\") pod \"community-operators-hljwl\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:51 crc kubenswrapper[5081]: I1003 16:55:51.652615 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hljwl"
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.168408 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hljwl"]
Oct 03 16:55:52 crc kubenswrapper[5081]: W1003 16:55:52.179884 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12dce964_7149_4663_aef7_92c5c102b6c0.slice/crio-8f00ba4fbb45573bc54d2d06d169d3cf13ba83b5f43719597f5118c0470d64c0 WatchSource:0}: Error finding container 8f00ba4fbb45573bc54d2d06d169d3cf13ba83b5f43719597f5118c0470d64c0: Status 404 returned error can't find the container with id 8f00ba4fbb45573bc54d2d06d169d3cf13ba83b5f43719597f5118c0470d64c0
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.436792 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerStarted","Data":"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c"}
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.436875 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerStarted","Data":"8f00ba4fbb45573bc54d2d06d169d3cf13ba83b5f43719597f5118c0470d64c0"}
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.516782 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.516833 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.570674 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qnfjm"
Oct 03 16:55:52 crc kubenswrapper[5081]: E1003 16:55:52.615436 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12dce964_7149_4663_aef7_92c5c102b6c0.slice/crio-conmon-2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12dce964_7149_4663_aef7_92c5c102b6c0.slice/crio-2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c.scope\": RecentStats: unable to find data in memory cache]"
Oct 03 16:55:52 crc kubenswrapper[5081]: I1003 16:55:52.828131 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9"
Oct 03 16:55:52 crc kubenswrapper[5081]: E1003 16:55:52.828421 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:55:53 crc kubenswrapper[5081]: I1003 16:55:53.443864 5081 generic.go:334] "Generic (PLEG): container finished" podID="12dce964-7149-4663-aef7-92c5c102b6c0" containerID="2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c" exitCode=0 Oct 03 16:55:53 crc kubenswrapper[5081]: I1003 16:55:53.443950 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerDied","Data":"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c"} Oct 03 16:55:53 crc kubenswrapper[5081]: I1003 16:55:53.491497 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qnfjm" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.382782 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-kr6pl"] Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.384330 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.405037 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kr6pl"] Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.461701 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerStarted","Data":"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da"} Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.559245 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gzwn\" (UniqueName: \"kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn\") pod \"keystone-db-create-kr6pl\" (UID: \"a7e99a29-237d-4ef4-9632-b9e2d990e0f4\") " pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.660877 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gzwn\" (UniqueName: \"kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn\") pod \"keystone-db-create-kr6pl\" (UID: \"a7e99a29-237d-4ef4-9632-b9e2d990e0f4\") " pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.680311 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gzwn\" (UniqueName: \"kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn\") pod \"keystone-db-create-kr6pl\" (UID: \"a7e99a29-237d-4ef4-9632-b9e2d990e0f4\") " pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.714350 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:54 crc kubenswrapper[5081]: I1003 16:55:54.852593 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnfjm"] Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.172032 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kr6pl"] Oct 03 16:55:55 crc kubenswrapper[5081]: W1003 16:55:55.175605 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7e99a29_237d_4ef4_9632_b9e2d990e0f4.slice/crio-b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2 WatchSource:0}: Error finding container b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2: Status 404 returned error can't find the container with id b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2 Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.483668 5081 generic.go:334] "Generic (PLEG): container finished" podID="12dce964-7149-4663-aef7-92c5c102b6c0" containerID="bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da" exitCode=0 Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.483934 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerDied","Data":"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da"} Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.488286 5081 generic.go:334] "Generic (PLEG): container finished" podID="a7e99a29-237d-4ef4-9632-b9e2d990e0f4" containerID="9d52cd93fd242289b177af03db3fd115f62ce9da033a500c3ec7abc510f135ed" exitCode=0 Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.488428 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kr6pl" event={"ID":"a7e99a29-237d-4ef4-9632-b9e2d990e0f4","Type":"ContainerDied","Data":"9d52cd93fd242289b177af03db3fd115f62ce9da033a500c3ec7abc510f135ed"} Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.488479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kr6pl" event={"ID":"a7e99a29-237d-4ef4-9632-b9e2d990e0f4","Type":"ContainerStarted","Data":"b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2"} Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.488537 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qnfjm" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="registry-server" containerID="cri-o://a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06" gracePeriod=2 Oct 03 16:55:55 crc kubenswrapper[5081]: I1003 16:55:55.926446 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnfjm" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.085800 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqsdx\" (UniqueName: \"kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx\") pod \"4197775b-e9a6-4035-bede-e430af2f35ef\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.085888 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content\") pod \"4197775b-e9a6-4035-bede-e430af2f35ef\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.085919 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities\") pod \"4197775b-e9a6-4035-bede-e430af2f35ef\" (UID: \"4197775b-e9a6-4035-bede-e430af2f35ef\") " Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.086884 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities" (OuterVolumeSpecName: "utilities") pod "4197775b-e9a6-4035-bede-e430af2f35ef" (UID: "4197775b-e9a6-4035-bede-e430af2f35ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.092702 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx" (OuterVolumeSpecName: "kube-api-access-zqsdx") pod "4197775b-e9a6-4035-bede-e430af2f35ef" (UID: "4197775b-e9a6-4035-bede-e430af2f35ef"). InnerVolumeSpecName "kube-api-access-zqsdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.132820 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4197775b-e9a6-4035-bede-e430af2f35ef" (UID: "4197775b-e9a6-4035-bede-e430af2f35ef"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.188642 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqsdx\" (UniqueName: \"kubernetes.io/projected/4197775b-e9a6-4035-bede-e430af2f35ef-kube-api-access-zqsdx\") on node \"crc\" DevicePath \"\"" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.188686 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.188698 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4197775b-e9a6-4035-bede-e430af2f35ef-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.504401 5081 generic.go:334] "Generic (PLEG): container finished" podID="4197775b-e9a6-4035-bede-e430af2f35ef" containerID="a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06" exitCode=0 Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.504486 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerDied","Data":"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06"} Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.504510 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnfjm" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.504534 5081 scope.go:117] "RemoveContainer" containerID="a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.504519 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnfjm" event={"ID":"4197775b-e9a6-4035-bede-e430af2f35ef","Type":"ContainerDied","Data":"2f6afc22eb385a9fb850a774cde6d8294916843f9939958ff081139584fb9197"} Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.509888 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerStarted","Data":"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d"} Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.534037 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hljwl" podStartSLOduration=3.027233642 podStartE2EDuration="5.533937142s" podCreationTimestamp="2025-10-03 16:55:51 +0000 UTC" firstStartedPulling="2025-10-03 16:55:53.445875388 +0000 UTC m=+5272.411432001" lastFinishedPulling="2025-10-03 16:55:55.952578888 +0000 UTC m=+5274.918135501" observedRunningTime="2025-10-03 16:55:56.52863733 +0000 UTC m=+5275.494193963" watchObservedRunningTime="2025-10-03 16:55:56.533937142 +0000 UTC m=+5275.499493755" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.553075 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnfjm"] Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.561926 5081 scope.go:117] "RemoveContainer" containerID="615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.563139 5081 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-marketplace/certified-operators-qnfjm"] Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.583521 5081 scope.go:117] "RemoveContainer" containerID="b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.619102 5081 scope.go:117] "RemoveContainer" containerID="a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06" Oct 03 16:55:56 crc kubenswrapper[5081]: E1003 16:55:56.621333 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06\": container with ID starting with a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06 not found: ID does not exist" containerID="a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.621380 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06"} err="failed to get container status \"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06\": rpc error: code = NotFound desc = could not find container \"a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06\": container with ID starting with a39087bf86a4175df0c794477f6770a20a8c412af700cee117362ba3a46d5d06 not found: ID does not exist" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.621410 5081 scope.go:117] "RemoveContainer" containerID="615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199" Oct 03 16:55:56 crc kubenswrapper[5081]: E1003 16:55:56.622076 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199\": container with ID starting with 615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199 not found: ID does not exist" containerID="615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.622099 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199"} err="failed to get container status \"615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199\": rpc error: code = NotFound desc = could not find container \"615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199\": container with ID starting with 615d0ea143b2b6347083982bb7d6ac6c18bbe0a6525afe51b081bdbf3e5c2199 not found: ID does not exist" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.622114 5081 scope.go:117] "RemoveContainer" containerID="b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2" Oct 03 16:55:56 crc kubenswrapper[5081]: E1003 16:55:56.622583 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2\": container with ID starting with b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2 not found: ID does not exist" containerID="b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.622866 5081 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2"} err="failed to get container status \"b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2\": rpc error: code = NotFound desc = could not find container \"b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2\": container with ID starting with b61a22090d7a53c38ede36373e0d104981df3b66cd2001566c27f2699b2473b2 not found: ID does not exist" Oct 03 16:55:56 crc kubenswrapper[5081]: I1003 16:55:56.890416 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.002916 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gzwn\" (UniqueName: \"kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn\") pod \"a7e99a29-237d-4ef4-9632-b9e2d990e0f4\" (UID: \"a7e99a29-237d-4ef4-9632-b9e2d990e0f4\") " Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.009816 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn" (OuterVolumeSpecName: "kube-api-access-2gzwn") pod "a7e99a29-237d-4ef4-9632-b9e2d990e0f4" (UID: "a7e99a29-237d-4ef4-9632-b9e2d990e0f4"). InnerVolumeSpecName "kube-api-access-2gzwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.105402 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gzwn\" (UniqueName: \"kubernetes.io/projected/a7e99a29-237d-4ef4-9632-b9e2d990e0f4-kube-api-access-2gzwn\") on node \"crc\" DevicePath \"\"" Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.520840 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kr6pl" Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.521330 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kr6pl" event={"ID":"a7e99a29-237d-4ef4-9632-b9e2d990e0f4","Type":"ContainerDied","Data":"b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2"} Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.521402 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b07a092996835edbf716dd99a9e16a62828c0a2f8a15aeaf270953a93dd18eb2" Oct 03 16:55:57 crc kubenswrapper[5081]: I1003 16:55:57.839498 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" path="/var/lib/kubelet/pods/4197775b-e9a6-4035-bede-e430af2f35ef/volumes" Oct 03 16:56:01 crc kubenswrapper[5081]: I1003 16:56:01.654243 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:01 crc kubenswrapper[5081]: I1003 16:56:01.654536 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:01 crc kubenswrapper[5081]: I1003 16:56:01.695979 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:02 crc kubenswrapper[5081]: I1003 16:56:02.630307 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:02 crc kubenswrapper[5081]: I1003 16:56:02.693222 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hljwl"] Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.466690 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9b2f-account-create-vqvmp"] Oct 03 16:56:04 crc kubenswrapper[5081]: E1003 16:56:04.467464 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e99a29-237d-4ef4-9632-b9e2d990e0f4" containerName="mariadb-database-create" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467480 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e99a29-237d-4ef4-9632-b9e2d990e0f4" containerName="mariadb-database-create" Oct 03 16:56:04 crc kubenswrapper[5081]: E1003 16:56:04.467498 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="extract-utilities" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467505 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="extract-utilities" Oct 03 16:56:04 crc kubenswrapper[5081]: E1003 16:56:04.467519 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="extract-content" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467526 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="extract-content" Oct 03 16:56:04 crc kubenswrapper[5081]: E1003 16:56:04.467539 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="registry-server" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467545 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="registry-server" Oct 
03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467734 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e99a29-237d-4ef4-9632-b9e2d990e0f4" containerName="mariadb-database-create" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.467754 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4197775b-e9a6-4035-bede-e430af2f35ef" containerName="registry-server" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.471335 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.473472 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.477017 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b2f-account-create-vqvmp"] Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.588958 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hljwl" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="registry-server" containerID="cri-o://436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d" gracePeriod=2 Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.627371 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.633754 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv4vt\" (UniqueName: \"kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt\") pod \"keystone-9b2f-account-create-vqvmp\" (UID: \"47783f9d-99a3-407e-ba0d-485a66c35a73\") " pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.736716 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv4vt\" (UniqueName: \"kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt\") pod \"keystone-9b2f-account-create-vqvmp\" (UID: \"47783f9d-99a3-407e-ba0d-485a66c35a73\") " pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.757209 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv4vt\" (UniqueName: \"kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt\") pod \"keystone-9b2f-account-create-vqvmp\" (UID: \"47783f9d-99a3-407e-ba0d-485a66c35a73\") " pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:04 crc kubenswrapper[5081]: I1003 16:56:04.792818 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.044538 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.243979 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6tzv\" (UniqueName: \"kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv\") pod \"12dce964-7149-4663-aef7-92c5c102b6c0\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.244067 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities\") pod \"12dce964-7149-4663-aef7-92c5c102b6c0\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.245411 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities" (OuterVolumeSpecName: "utilities") pod "12dce964-7149-4663-aef7-92c5c102b6c0" (UID: "12dce964-7149-4663-aef7-92c5c102b6c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.249176 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv" (OuterVolumeSpecName: "kube-api-access-p6tzv") pod "12dce964-7149-4663-aef7-92c5c102b6c0" (UID: "12dce964-7149-4663-aef7-92c5c102b6c0"). InnerVolumeSpecName "kube-api-access-p6tzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.245550 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content\") pod \"12dce964-7149-4663-aef7-92c5c102b6c0\" (UID: \"12dce964-7149-4663-aef7-92c5c102b6c0\") " Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.250986 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6tzv\" (UniqueName: \"kubernetes.io/projected/12dce964-7149-4663-aef7-92c5c102b6c0-kube-api-access-p6tzv\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.251014 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.256525 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9b2f-account-create-vqvmp"] Oct 03 16:56:05 crc kubenswrapper[5081]: W1003 16:56:05.259434 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47783f9d_99a3_407e_ba0d_485a66c35a73.slice/crio-c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae WatchSource:0}: Error finding container c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae: Status 404 returned error can't find the container with id c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.304709 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod 
"12dce964-7149-4663-aef7-92c5c102b6c0" (UID: "12dce964-7149-4663-aef7-92c5c102b6c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.353643 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12dce964-7149-4663-aef7-92c5c102b6c0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.601062 5081 generic.go:334] "Generic (PLEG): container finished" podID="12dce964-7149-4663-aef7-92c5c102b6c0" containerID="436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d" exitCode=0 Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.601148 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerDied","Data":"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d"} Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.601219 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hljwl" event={"ID":"12dce964-7149-4663-aef7-92c5c102b6c0","Type":"ContainerDied","Data":"8f00ba4fbb45573bc54d2d06d169d3cf13ba83b5f43719597f5118c0470d64c0"} Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.601251 5081 scope.go:117] "RemoveContainer" containerID="436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.601163 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hljwl" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.604300 5081 generic.go:334] "Generic (PLEG): container finished" podID="47783f9d-99a3-407e-ba0d-485a66c35a73" containerID="8e187ba35b9795666e456d316c84c1a0775e2258f9a358526be61c903c232ac7" exitCode=0 Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.604346 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b2f-account-create-vqvmp" event={"ID":"47783f9d-99a3-407e-ba0d-485a66c35a73","Type":"ContainerDied","Data":"8e187ba35b9795666e456d316c84c1a0775e2258f9a358526be61c903c232ac7"} Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.604376 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b2f-account-create-vqvmp" event={"ID":"47783f9d-99a3-407e-ba0d-485a66c35a73","Type":"ContainerStarted","Data":"c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae"} Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.634809 5081 scope.go:117] "RemoveContainer" containerID="bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.639434 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hljwl"] Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.645569 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hljwl"] Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.676103 5081 scope.go:117] "RemoveContainer" containerID="2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.701843 5081 scope.go:117] "RemoveContainer" containerID="436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d" Oct 03 16:56:05 crc kubenswrapper[5081]: E1003 
16:56:05.702287 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d\": container with ID starting with 436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d not found: ID does not exist" containerID="436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.702337 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d"} err="failed to get container status \"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d\": rpc error: code = NotFound desc = could not find container \"436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d\": container with ID starting with 436d843c73f5d25e117e4caebc975b2ae0a4f1d343f8d049ed571feb407bc41d not found: ID does not exist" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.702365 5081 scope.go:117] "RemoveContainer" containerID="bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da" Oct 03 16:56:05 crc kubenswrapper[5081]: E1003 16:56:05.702662 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da\": container with ID starting with bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da not found: ID does not exist" containerID="bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.702694 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da"} err="failed to get container status \"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da\": rpc error: code = NotFound desc = could not find container \"bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da\": container with ID starting with bdbc9335f039b119da709e8242807e07f80d67eba9dda951995d37fcf77a97da not found: ID does not exist" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.702708 5081 scope.go:117] "RemoveContainer" containerID="2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c" Oct 03 16:56:05 crc kubenswrapper[5081]: E1003 16:56:05.703122 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c\": container with ID starting with 2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c not found: ID does not exist" containerID="2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.703156 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c"} err="failed to get container status \"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c\": rpc error: code = NotFound desc = could not find container \"2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c\": container with ID starting with 2f60153e96d8b0de46007e58097ae743abce32d4b53d16f2049aa78167faf90c not found: ID does not exist" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.828363 5081 
scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:56:05 crc kubenswrapper[5081]: E1003 16:56:05.829355 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:56:05 crc kubenswrapper[5081]: I1003 16:56:05.839609 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" path="/var/lib/kubelet/pods/12dce964-7149-4663-aef7-92c5c102b6c0/volumes" Oct 03 16:56:06 crc kubenswrapper[5081]: I1003 16:56:06.908885 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.078440 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv4vt\" (UniqueName: \"kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt\") pod \"47783f9d-99a3-407e-ba0d-485a66c35a73\" (UID: \"47783f9d-99a3-407e-ba0d-485a66c35a73\") " Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.084020 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt" (OuterVolumeSpecName: "kube-api-access-dv4vt") pod "47783f9d-99a3-407e-ba0d-485a66c35a73" (UID: "47783f9d-99a3-407e-ba0d-485a66c35a73"). InnerVolumeSpecName "kube-api-access-dv4vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.180353 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv4vt\" (UniqueName: \"kubernetes.io/projected/47783f9d-99a3-407e-ba0d-485a66c35a73-kube-api-access-dv4vt\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.621019 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9b2f-account-create-vqvmp" event={"ID":"47783f9d-99a3-407e-ba0d-485a66c35a73","Type":"ContainerDied","Data":"c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae"} Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.621629 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c87bc673945c7c1b0a9e52caf9122ed2b4b109df7be866c0b2c87d4e04266bae" Oct 03 16:56:07 crc kubenswrapper[5081]: I1003 16:56:07.621062 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9b2f-account-create-vqvmp" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837432 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-6jswn"] Oct 03 16:56:09 crc kubenswrapper[5081]: E1003 16:56:09.837714 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="registry-server" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837726 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="registry-server" Oct 03 16:56:09 crc kubenswrapper[5081]: E1003 16:56:09.837746 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47783f9d-99a3-407e-ba0d-485a66c35a73" containerName="mariadb-account-create" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837752 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="47783f9d-99a3-407e-ba0d-485a66c35a73" containerName="mariadb-account-create" Oct 03 16:56:09 crc kubenswrapper[5081]: E1003 16:56:09.837765 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="extract-utilities" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837771 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="extract-utilities" Oct 03 16:56:09 crc kubenswrapper[5081]: E1003 16:56:09.837794 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="extract-content" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837799 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="extract-content" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837957 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="12dce964-7149-4663-aef7-92c5c102b6c0" containerName="registry-server" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.837993 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="47783f9d-99a3-407e-ba0d-485a66c35a73" containerName="mariadb-account-create" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.838496 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-6jswn"] Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.838591 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.840333 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.841144 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bfd8d" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.841385 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 16:56:09 crc kubenswrapper[5081]: I1003 16:56:09.844780 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.027261 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpnjx\" (UniqueName: \"kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.027367 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.027534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.128822 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.128893 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpnjx\" (UniqueName: \"kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.128932 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.136609 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.147144 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.189432 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpnjx\" (UniqueName: \"kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx\") pod \"keystone-db-sync-6jswn\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.457037 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:10 crc kubenswrapper[5081]: I1003 16:56:10.856953 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-6jswn"] Oct 03 16:56:10 crc kubenswrapper[5081]: W1003 16:56:10.858375 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e4ecbe6_c4ff_4931_b59e_b0e4aee9d144.slice/crio-4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23 WatchSource:0}: Error finding container 4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23: Status 404 returned error can't find the container with id 4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23 Oct 03 16:56:11 crc kubenswrapper[5081]: I1003 16:56:11.650838 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6jswn" event={"ID":"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144","Type":"ContainerStarted","Data":"626dfd33d9ca0721cd8a0fb45936ffd3f7f6cc6ff915e7cac93665e9e73deb5c"} Oct 03 16:56:11 crc kubenswrapper[5081]: I1003 16:56:11.650900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6jswn" event={"ID":"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144","Type":"ContainerStarted","Data":"4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23"} Oct 03 16:56:11 crc kubenswrapper[5081]: I1003 16:56:11.668259 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-6jswn" podStartSLOduration=2.6682355429999998 podStartE2EDuration="2.668235543s" podCreationTimestamp="2025-10-03 16:56:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:56:11.665443033 +0000 UTC m=+5290.630999656" watchObservedRunningTime="2025-10-03 16:56:11.668235543 +0000 UTC m=+5290.633792176" Oct 03 16:56:13 crc kubenswrapper[5081]: I1003 16:56:13.668103 5081 generic.go:334] "Generic (PLEG): container finished" podID="5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" containerID="626dfd33d9ca0721cd8a0fb45936ffd3f7f6cc6ff915e7cac93665e9e73deb5c" exitCode=0 Oct 03 16:56:13 crc kubenswrapper[5081]: I1003 16:56:13.668186 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6jswn" event={"ID":"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144","Type":"ContainerDied","Data":"626dfd33d9ca0721cd8a0fb45936ffd3f7f6cc6ff915e7cac93665e9e73deb5c"} Oct 03 16:56:14 crc kubenswrapper[5081]: I1003 16:56:14.978344 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.113285 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpnjx\" (UniqueName: \"kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx\") pod \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.113468 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle\") pod \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.113522 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data\") pod \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\" (UID: \"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144\") " Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.122771 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx" (OuterVolumeSpecName: "kube-api-access-lpnjx") pod "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" (UID: "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144"). InnerVolumeSpecName "kube-api-access-lpnjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.135878 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" (UID: "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.156131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data" (OuterVolumeSpecName: "config-data") pod "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" (UID: "5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.215320 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.215354 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.215363 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpnjx\" (UniqueName: \"kubernetes.io/projected/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144-kube-api-access-lpnjx\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.697849 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6jswn" event={"ID":"5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144","Type":"ContainerDied","Data":"4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23"} Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.698190 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c591e0886fdd61b6677c124421302d7b4346d5780784aa8a3794c517fd2ed23" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.697943 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-6jswn" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.891711 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:56:15 crc kubenswrapper[5081]: E1003 16:56:15.892031 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" containerName="keystone-db-sync" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.892049 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" containerName="keystone-db-sync" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.892207 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" containerName="keystone-db-sync" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.893009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.909109 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.988482 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fzr6z"] Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.989673 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.994111 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.994266 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.994353 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 16:56:15 crc kubenswrapper[5081]: I1003 16:56:15.994640 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bfd8d" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.006421 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fzr6z"] Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.030960 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.031042 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.031087 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.031112 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-984df\" (UniqueName: \"kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.031147 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133233 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133301 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb\") pod 
\"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133320 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133339 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133408 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-984df\" (UniqueName: \"kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133432 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133458 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.133493 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.134371 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.134439 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.134543 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcvfl\" (UniqueName: \"kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl\") pod \"keystone-bootstrap-fzr6z\" 
(UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.134678 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.134727 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.135155 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.135375 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.152497 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-984df\" (UniqueName: \"kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df\") pod \"dnsmasq-dns-f6bf76467-6j7lf\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.212751 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238507 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238600 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238659 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcvfl\" (UniqueName: \"kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238709 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238731 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.238748 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.243409 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.243472 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.243950 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.244804 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.246218 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.255721 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcvfl\" (UniqueName: \"kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl\") pod \"keystone-bootstrap-fzr6z\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.308167 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.673658 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:56:16 crc kubenswrapper[5081]: W1003 16:56:16.675886 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cf53967_2da7_4356_a07a_8c7eaf0ff76a.slice/crio-b0ac78504564744247d6da2ad0eba0f4e34f27bb1fec67973a4890f0fcd5ca0f WatchSource:0}: Error finding container b0ac78504564744247d6da2ad0eba0f4e34f27bb1fec67973a4890f0fcd5ca0f: Status 404 returned error can't find the container with id b0ac78504564744247d6da2ad0eba0f4e34f27bb1fec67973a4890f0fcd5ca0f Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.712372 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" event={"ID":"3cf53967-2da7-4356-a07a-8c7eaf0ff76a","Type":"ContainerStarted","Data":"b0ac78504564744247d6da2ad0eba0f4e34f27bb1fec67973a4890f0fcd5ca0f"} Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.825811 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fzr6z"] Oct 03 16:56:16 crc kubenswrapper[5081]: I1003 16:56:16.828199 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:56:16 crc kubenswrapper[5081]: E1003 16:56:16.828398 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 16:56:16 crc kubenswrapper[5081]: W1003 16:56:16.829873 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd6aecdf_542c_45ed_a100_7a11b433980a.slice/crio-c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf WatchSource:0}: Error finding container c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf: Status 404 returned error can't find the container with id 
c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf Oct 03 16:56:17 crc kubenswrapper[5081]: I1003 16:56:17.722695 5081 generic.go:334] "Generic (PLEG): container finished" podID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerID="20ca9827a7789a19019c3bde70031053b75be684e6d3d18672b1e1e6cd9a0b1a" exitCode=0 Oct 03 16:56:17 crc kubenswrapper[5081]: I1003 16:56:17.722737 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" event={"ID":"3cf53967-2da7-4356-a07a-8c7eaf0ff76a","Type":"ContainerDied","Data":"20ca9827a7789a19019c3bde70031053b75be684e6d3d18672b1e1e6cd9a0b1a"} Oct 03 16:56:17 crc kubenswrapper[5081]: I1003 16:56:17.725379 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fzr6z" event={"ID":"dd6aecdf-542c-45ed-a100-7a11b433980a","Type":"ContainerStarted","Data":"3455b04993baa2a82bfe3cdfc8e9f0c9f0688fa565074db5e3207752c834f7a8"} Oct 03 16:56:17 crc kubenswrapper[5081]: I1003 16:56:17.725411 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fzr6z" event={"ID":"dd6aecdf-542c-45ed-a100-7a11b433980a","Type":"ContainerStarted","Data":"c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf"} Oct 03 16:56:17 crc kubenswrapper[5081]: I1003 16:56:17.778793 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fzr6z" podStartSLOduration=2.778764326 podStartE2EDuration="2.778764326s" podCreationTimestamp="2025-10-03 16:56:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:56:17.774151483 +0000 UTC m=+5296.739708096" watchObservedRunningTime="2025-10-03 16:56:17.778764326 +0000 UTC m=+5296.744320939" Oct 03 16:56:18 crc kubenswrapper[5081]: I1003 16:56:18.735344 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" event={"ID":"3cf53967-2da7-4356-a07a-8c7eaf0ff76a","Type":"ContainerStarted","Data":"eac74e8b6166373f528c8a194645c2208324c2e3868c2e168982a08455de07e7"} Oct 03 16:56:18 crc kubenswrapper[5081]: I1003 16:56:18.735715 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:18 crc kubenswrapper[5081]: I1003 16:56:18.754516 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" podStartSLOduration=3.754500835 podStartE2EDuration="3.754500835s" podCreationTimestamp="2025-10-03 16:56:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:56:18.751944771 +0000 UTC m=+5297.717501384" watchObservedRunningTime="2025-10-03 16:56:18.754500835 +0000 UTC m=+5297.720057438" Oct 03 16:56:20 crc kubenswrapper[5081]: I1003 16:56:20.750871 5081 generic.go:334] "Generic (PLEG): container finished" podID="dd6aecdf-542c-45ed-a100-7a11b433980a" containerID="3455b04993baa2a82bfe3cdfc8e9f0c9f0688fa565074db5e3207752c834f7a8" exitCode=0 Oct 03 16:56:20 crc kubenswrapper[5081]: I1003 16:56:20.750962 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fzr6z" event={"ID":"dd6aecdf-542c-45ed-a100-7a11b433980a","Type":"ContainerDied","Data":"3455b04993baa2a82bfe3cdfc8e9f0c9f0688fa565074db5e3207752c834f7a8"} Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.134834 5081 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253324 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253421 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcvfl\" (UniqueName: \"kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253544 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253591 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253652 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.253698 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data\") pod \"dd6aecdf-542c-45ed-a100-7a11b433980a\" (UID: \"dd6aecdf-542c-45ed-a100-7a11b433980a\") " Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.260842 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts" (OuterVolumeSpecName: "scripts") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.261722 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.266350 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl" (OuterVolumeSpecName: "kube-api-access-jcvfl") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "kube-api-access-jcvfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.266389 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.281895 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data" (OuterVolumeSpecName: "config-data") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.286192 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd6aecdf-542c-45ed-a100-7a11b433980a" (UID: "dd6aecdf-542c-45ed-a100-7a11b433980a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356066 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356119 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356129 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcvfl\" (UniqueName: \"kubernetes.io/projected/dd6aecdf-542c-45ed-a100-7a11b433980a-kube-api-access-jcvfl\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356140 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356147 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.356156 5081 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd6aecdf-542c-45ed-a100-7a11b433980a-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.435587 5081 scope.go:117] "RemoveContainer" containerID="d6fe751e21ba5f507d7d512d31e22c954bbf6c73e8a1c18baffe0f65d31679fa" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.453227 5081 scope.go:117] "RemoveContainer" containerID="c2822ebefc44ea8ec0a5744c377d4488bc9a0f449496837876135de32e9befbd" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.491385 5081 scope.go:117] "RemoveContainer" containerID="c8ec31217d921cb957974cec2359bc4ac273937f97e8e7ba228115a04282ce01" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.510820 5081 
scope.go:117] "RemoveContainer" containerID="9b79b6b559dae6cfa6ca0dbceaa271e91e15f297ed794e4922bc5651f05e28d9" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.527939 5081 scope.go:117] "RemoveContainer" containerID="0abfcc406620572a36480c43d866e21ff39fd74b70efb796ea84ee18f858b2ad" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.544233 5081 scope.go:117] "RemoveContainer" containerID="dd1c4a125aae040be25a4546b23759343c3611f57586216abf5fa075ebe10ec8" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.561625 5081 scope.go:117] "RemoveContainer" containerID="fb7662c62795ffb183937d9e10e81d9f16d4087ad2192706730a48d3b60b382a" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.771830 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fzr6z" event={"ID":"dd6aecdf-542c-45ed-a100-7a11b433980a","Type":"ContainerDied","Data":"c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf"} Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.771871 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8f09b08ed5d9baf2333d8ae6459abade57a7881626439635abcf0aaf48e19bf" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.771937 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fzr6z" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.833568 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fzr6z"] Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.840601 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fzr6z"] Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.936057 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-scl5w"] Oct 03 16:56:22 crc kubenswrapper[5081]: E1003 16:56:22.936400 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd6aecdf-542c-45ed-a100-7a11b433980a" containerName="keystone-bootstrap" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.936418 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd6aecdf-542c-45ed-a100-7a11b433980a" containerName="keystone-bootstrap" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.936605 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd6aecdf-542c-45ed-a100-7a11b433980a" containerName="keystone-bootstrap" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.937239 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.941091 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.941094 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.941278 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bfd8d" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.941406 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 16:56:22 crc kubenswrapper[5081]: I1003 16:56:22.945466 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-scl5w"] Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.066797 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.066856 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.067141 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.067267 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.067355 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.067417 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t7qv\" (UniqueName: \"kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys\") pod \"keystone-bootstrap-scl5w\" 
(UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168758 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t7qv\" (UniqueName: \"kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168809 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168902 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.168954 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.173750 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.173950 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.175099 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.182592 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.185570 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.185932 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t7qv\" (UniqueName: \"kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv\") pod \"keystone-bootstrap-scl5w\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.256740 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.657099 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-scl5w"] Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.779839 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-scl5w" event={"ID":"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524","Type":"ContainerStarted","Data":"81410b45bc0165a4c1f1c2ec61968db578e77de27e87e8547844fa116ec9c783"} Oct 03 16:56:23 crc kubenswrapper[5081]: I1003 16:56:23.837695 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd6aecdf-542c-45ed-a100-7a11b433980a" path="/var/lib/kubelet/pods/dd6aecdf-542c-45ed-a100-7a11b433980a/volumes" Oct 03 16:56:24 crc kubenswrapper[5081]: I1003 16:56:24.790395 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-scl5w" event={"ID":"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524","Type":"ContainerStarted","Data":"f795ab1c30927ce3856116bc948e6e0eee7c94711e1069738be81eb4c8adadc0"} Oct 03 16:56:24 crc kubenswrapper[5081]: I1003 16:56:24.810970 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-scl5w" podStartSLOduration=2.81095443 podStartE2EDuration="2.81095443s" podCreationTimestamp="2025-10-03 16:56:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:56:24.806486982 +0000 UTC m=+5303.772043605" watchObservedRunningTime="2025-10-03 16:56:24.81095443 +0000 UTC m=+5303.776511043" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.214747 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.274578 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"] Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.274835 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="dnsmasq-dns" containerID="cri-o://3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0" gracePeriod=10 Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.796050 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.805968 5081 generic.go:334] "Generic (PLEG): container finished" podID="91a96197-77e4-4185-8390-3a492352ae24" containerID="3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0" exitCode=0 Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.806010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" event={"ID":"91a96197-77e4-4185-8390-3a492352ae24","Type":"ContainerDied","Data":"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0"} Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.806046 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" event={"ID":"91a96197-77e4-4185-8390-3a492352ae24","Type":"ContainerDied","Data":"62abd556518126a46543973350440d3d6e89cf1418b972cc9a219d95c9d6da00"} Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.806056 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d76bf78b7-hmd68" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.806073 5081 scope.go:117] "RemoveContainer" containerID="3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.810600 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-scl5w" event={"ID":"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524","Type":"ContainerDied","Data":"f795ab1c30927ce3856116bc948e6e0eee7c94711e1069738be81eb4c8adadc0"} Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.810811 5081 generic.go:334] "Generic (PLEG): container finished" podID="ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" containerID="f795ab1c30927ce3856116bc948e6e0eee7c94711e1069738be81eb4c8adadc0" exitCode=0 Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.849280 5081 scope.go:117] "RemoveContainer" containerID="a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.870597 5081 scope.go:117] "RemoveContainer" containerID="3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0" Oct 03 16:56:26 crc kubenswrapper[5081]: E1003 16:56:26.871189 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0\": container with ID starting with 3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0 not found: ID does not exist" containerID="3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.871264 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0"} err="failed to get container status \"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0\": rpc error: code = NotFound desc = could not find container \"3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0\": container with ID starting with 3cbc8c0340e666855e919cf354b6eae467dd0937dbaecca362bc26d803b857b0 not found: ID does not exist" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.871297 5081 scope.go:117] "RemoveContainer" containerID="a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d" Oct 03 16:56:26 crc kubenswrapper[5081]: E1003 16:56:26.871909 5081 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d\": container with ID starting with a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d not found: ID does not exist" containerID="a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.872027 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d"} err="failed to get container status \"a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d\": rpc error: code = NotFound desc = could not find container \"a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d\": container with ID starting with a9d1312a6226cbdebc678cd68a1e5dadefb1476346865f14e1cc2e774044d33d not found: ID does not exist" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.936313 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config\") pod \"91a96197-77e4-4185-8390-3a492352ae24\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.936358 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb\") pod \"91a96197-77e4-4185-8390-3a492352ae24\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.936497 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb\") pod \"91a96197-77e4-4185-8390-3a492352ae24\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.936580 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc\") pod \"91a96197-77e4-4185-8390-3a492352ae24\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.937358 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc5vg\" (UniqueName: \"kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg\") pod \"91a96197-77e4-4185-8390-3a492352ae24\" (UID: \"91a96197-77e4-4185-8390-3a492352ae24\") " Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.941733 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg" (OuterVolumeSpecName: "kube-api-access-nc5vg") pod "91a96197-77e4-4185-8390-3a492352ae24" (UID: "91a96197-77e4-4185-8390-3a492352ae24"). InnerVolumeSpecName "kube-api-access-nc5vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.974546 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "91a96197-77e4-4185-8390-3a492352ae24" (UID: "91a96197-77e4-4185-8390-3a492352ae24"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.974823 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "91a96197-77e4-4185-8390-3a492352ae24" (UID: "91a96197-77e4-4185-8390-3a492352ae24"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.978968 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config" (OuterVolumeSpecName: "config") pod "91a96197-77e4-4185-8390-3a492352ae24" (UID: "91a96197-77e4-4185-8390-3a492352ae24"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:56:26 crc kubenswrapper[5081]: I1003 16:56:26.980137 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "91a96197-77e4-4185-8390-3a492352ae24" (UID: "91a96197-77e4-4185-8390-3a492352ae24"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.039115 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-config\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.039146 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.039154 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.039162 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91a96197-77e4-4185-8390-3a492352ae24-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.039173 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc5vg\" (UniqueName: \"kubernetes.io/projected/91a96197-77e4-4185-8390-3a492352ae24-kube-api-access-nc5vg\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.139445 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"] Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.146354 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d76bf78b7-hmd68"] Oct 03 16:56:27 crc kubenswrapper[5081]: I1003 16:56:27.845290 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91a96197-77e4-4185-8390-3a492352ae24" path="/var/lib/kubelet/pods/91a96197-77e4-4185-8390-3a492352ae24/volumes" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.120260 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157101 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157198 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157266 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157304 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t7qv\" (UniqueName: \"kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157333 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.157356 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts\") pod \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\" (UID: \"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524\") " Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.160682 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.160901 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.160933 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv" (OuterVolumeSpecName: "kube-api-access-2t7qv") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "kube-api-access-2t7qv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.162829 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts" (OuterVolumeSpecName: "scripts") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.177840 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.179699 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data" (OuterVolumeSpecName: "config-data") pod "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" (UID: "ccc072d6-fcd6-4318-a9c8-b2cc91f7a524"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258671 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258717 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t7qv\" (UniqueName: \"kubernetes.io/projected/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-kube-api-access-2t7qv\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258730 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258741 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258749 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.258758 5081 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.828700 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-scl5w" event={"ID":"ccc072d6-fcd6-4318-a9c8-b2cc91f7a524","Type":"ContainerDied","Data":"81410b45bc0165a4c1f1c2ec61968db578e77de27e87e8547844fa116ec9c783"} Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.828733 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81410b45bc0165a4c1f1c2ec61968db578e77de27e87e8547844fa116ec9c783" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.828744 5081 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-scl5w" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.913429 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6f87fd488-szdht"] Oct 03 16:56:28 crc kubenswrapper[5081]: E1003 16:56:28.913803 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="dnsmasq-dns" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.913816 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="dnsmasq-dns" Oct 03 16:56:28 crc kubenswrapper[5081]: E1003 16:56:28.913830 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" containerName="keystone-bootstrap" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.913836 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" containerName="keystone-bootstrap" Oct 03 16:56:28 crc kubenswrapper[5081]: E1003 16:56:28.913853 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="init" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.913859 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="init" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.913997 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="91a96197-77e4-4185-8390-3a492352ae24" containerName="dnsmasq-dns" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.914020 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" containerName="keystone-bootstrap" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.914601 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.921787 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.923844 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.924021 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.924149 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-bfd8d" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.936293 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6f87fd488-szdht"] Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975220 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-fernet-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975283 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-credential-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975359 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d55c8\" (UniqueName: \"kubernetes.io/projected/84d9e617-4210-4e00-9fe0-f6c065ce1282-kube-api-access-d55c8\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975395 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-combined-ca-bundle\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975436 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-config-data\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:28 crc kubenswrapper[5081]: I1003 16:56:28.975479 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-scripts\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.076801 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-scripts\") pod \"keystone-6f87fd488-szdht\" (UID: 
\"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.077130 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-fernet-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.077278 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-credential-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.077418 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d55c8\" (UniqueName: \"kubernetes.io/projected/84d9e617-4210-4e00-9fe0-f6c065ce1282-kube-api-access-d55c8\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.077551 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-combined-ca-bundle\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.077695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-config-data\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.082530 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-scripts\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.083276 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-combined-ca-bundle\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.083680 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-fernet-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.084933 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-credential-keys\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.085002 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d9e617-4210-4e00-9fe0-f6c065ce1282-config-data\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.095424 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d55c8\" (UniqueName: \"kubernetes.io/projected/84d9e617-4210-4e00-9fe0-f6c065ce1282-kube-api-access-d55c8\") pod \"keystone-6f87fd488-szdht\" (UID: \"84d9e617-4210-4e00-9fe0-f6c065ce1282\") " pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.238022 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.643891 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6f87fd488-szdht"] Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.836900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6f87fd488-szdht" event={"ID":"84d9e617-4210-4e00-9fe0-f6c065ce1282","Type":"ContainerStarted","Data":"94d22576b7a4aadbbab05936e99bc18e23f90c59e3dbd2efb2a456558573b0ef"} Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.836942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6f87fd488-szdht" event={"ID":"84d9e617-4210-4e00-9fe0-f6c065ce1282","Type":"ContainerStarted","Data":"f74eaeba7721c51ad6e88b46dc223ae792ccad582c9da7ca41bffd26dad38682"} Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.837979 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:56:29 crc kubenswrapper[5081]: I1003 16:56:29.851876 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6f87fd488-szdht" podStartSLOduration=1.851858732 podStartE2EDuration="1.851858732s" podCreationTimestamp="2025-10-03 16:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:56:29.851295566 +0000 UTC m=+5308.816852189" watchObservedRunningTime="2025-10-03 16:56:29.851858732 +0000 UTC m=+5308.817415345" Oct 03 16:56:31 crc kubenswrapper[5081]: I1003 16:56:31.835524 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 16:56:32 crc kubenswrapper[5081]: I1003 16:56:32.860332 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41"} Oct 03 16:57:00 crc kubenswrapper[5081]: I1003 16:57:00.769211 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6f87fd488-szdht" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.533539 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.535268 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.538129 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.538281 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-89kww" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.538329 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.540522 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.699510 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.699804 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.700027 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n95j\" (UniqueName: \"kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.802130 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.802181 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.802233 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n95j\" (UniqueName: \"kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.803322 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.809127 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.827456 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n95j\" (UniqueName: \"kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j\") pod \"openstackclient\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " pod="openstack/openstackclient" Oct 03 16:57:04 crc kubenswrapper[5081]: I1003 16:57:04.857761 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 16:57:05 crc kubenswrapper[5081]: I1003 16:57:05.298214 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 16:57:05 crc kubenswrapper[5081]: W1003 16:57:05.302995 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod357afcbe_1c13_4929_a872_ca9d48a0d1cd.slice/crio-183cbda0b61891b6289c257abb9b7d88c2e165cb1986ca1c2bea89c2610bede2 WatchSource:0}: Error finding container 183cbda0b61891b6289c257abb9b7d88c2e165cb1986ca1c2bea89c2610bede2: Status 404 returned error can't find the container with id 183cbda0b61891b6289c257abb9b7d88c2e165cb1986ca1c2bea89c2610bede2 Oct 03 16:57:06 crc kubenswrapper[5081]: I1003 16:57:06.154248 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"357afcbe-1c13-4929-a872-ca9d48a0d1cd","Type":"ContainerStarted","Data":"ded69724ea946f0a2544294cc22d841f931373bd41ecdcb0c02078ae0a72bb62"} Oct 03 16:57:06 crc kubenswrapper[5081]: I1003 16:57:06.155772 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"357afcbe-1c13-4929-a872-ca9d48a0d1cd","Type":"ContainerStarted","Data":"183cbda0b61891b6289c257abb9b7d88c2e165cb1986ca1c2bea89c2610bede2"} Oct 03 16:57:06 crc kubenswrapper[5081]: I1003 16:57:06.171233 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.171216889 podStartE2EDuration="2.171216889s" podCreationTimestamp="2025-10-03 16:57:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:57:06.168857631 +0000 UTC m=+5345.134414274" watchObservedRunningTime="2025-10-03 16:57:06.171216889 +0000 UTC m=+5345.136773502" Oct 03 16:58:48 crc kubenswrapper[5081]: I1003 16:58:48.964132 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-vfkpg"] Oct 03 16:58:48 crc kubenswrapper[5081]: I1003 16:58:48.966094 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:48 crc kubenswrapper[5081]: I1003 16:58:48.972153 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vfkpg"] Oct 03 16:58:48 crc kubenswrapper[5081]: I1003 16:58:48.981408 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m8w7\" (UniqueName: \"kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7\") pod \"barbican-db-create-vfkpg\" (UID: \"218e531e-e3bb-4690-a0c9-31a6a56c01ac\") " pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:49 crc kubenswrapper[5081]: I1003 16:58:49.082620 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m8w7\" (UniqueName: \"kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7\") pod \"barbican-db-create-vfkpg\" (UID: \"218e531e-e3bb-4690-a0c9-31a6a56c01ac\") " pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:49 crc kubenswrapper[5081]: I1003 16:58:49.104378 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m8w7\" (UniqueName: \"kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7\") pod \"barbican-db-create-vfkpg\" (UID: \"218e531e-e3bb-4690-a0c9-31a6a56c01ac\") " pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:49 crc kubenswrapper[5081]: I1003 16:58:49.292292 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:49 crc kubenswrapper[5081]: I1003 16:58:49.698496 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vfkpg"] Oct 03 16:58:50 crc kubenswrapper[5081]: I1003 16:58:50.001858 5081 generic.go:334] "Generic (PLEG): container finished" podID="218e531e-e3bb-4690-a0c9-31a6a56c01ac" containerID="a42cb1ef112f1313e16b17cfc3b03f8475dfcdc4853e99f0c8655266d4e6649e" exitCode=0 Oct 03 16:58:50 crc kubenswrapper[5081]: I1003 16:58:50.001891 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vfkpg" event={"ID":"218e531e-e3bb-4690-a0c9-31a6a56c01ac","Type":"ContainerDied","Data":"a42cb1ef112f1313e16b17cfc3b03f8475dfcdc4853e99f0c8655266d4e6649e"} Oct 03 16:58:50 crc kubenswrapper[5081]: I1003 16:58:50.001940 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vfkpg" event={"ID":"218e531e-e3bb-4690-a0c9-31a6a56c01ac","Type":"ContainerStarted","Data":"d90986a74e07c976fcb3b45ce84d7e08c0fae5da431932975b64693329d919c5"} Oct 03 16:58:51 crc kubenswrapper[5081]: I1003 16:58:51.308722 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:51 crc kubenswrapper[5081]: I1003 16:58:51.420024 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m8w7\" (UniqueName: \"kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7\") pod \"218e531e-e3bb-4690-a0c9-31a6a56c01ac\" (UID: \"218e531e-e3bb-4690-a0c9-31a6a56c01ac\") " Oct 03 16:58:51 crc kubenswrapper[5081]: I1003 16:58:51.427588 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7" (OuterVolumeSpecName: "kube-api-access-2m8w7") pod "218e531e-e3bb-4690-a0c9-31a6a56c01ac" (UID: "218e531e-e3bb-4690-a0c9-31a6a56c01ac"). 
InnerVolumeSpecName "kube-api-access-2m8w7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:58:51 crc kubenswrapper[5081]: I1003 16:58:51.523456 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m8w7\" (UniqueName: \"kubernetes.io/projected/218e531e-e3bb-4690-a0c9-31a6a56c01ac-kube-api-access-2m8w7\") on node \"crc\" DevicePath \"\"" Oct 03 16:58:52 crc kubenswrapper[5081]: I1003 16:58:52.019309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vfkpg" event={"ID":"218e531e-e3bb-4690-a0c9-31a6a56c01ac","Type":"ContainerDied","Data":"d90986a74e07c976fcb3b45ce84d7e08c0fae5da431932975b64693329d919c5"} Oct 03 16:58:52 crc kubenswrapper[5081]: I1003 16:58:52.019682 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d90986a74e07c976fcb3b45ce84d7e08c0fae5da431932975b64693329d919c5" Oct 03 16:58:52 crc kubenswrapper[5081]: I1003 16:58:52.019356 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vfkpg" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.067453 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6a43-account-create-dcnxj"] Oct 03 16:58:59 crc kubenswrapper[5081]: E1003 16:58:59.070990 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="218e531e-e3bb-4690-a0c9-31a6a56c01ac" containerName="mariadb-database-create" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.071082 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="218e531e-e3bb-4690-a0c9-31a6a56c01ac" containerName="mariadb-database-create" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.071324 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="218e531e-e3bb-4690-a0c9-31a6a56c01ac" containerName="mariadb-database-create" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.072295 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.075100 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.082346 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6a43-account-create-dcnxj"] Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.267243 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmlwn\" (UniqueName: \"kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn\") pod \"barbican-6a43-account-create-dcnxj\" (UID: \"e888ae0e-fada-4076-9f60-c20eaf243332\") " pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.369587 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmlwn\" (UniqueName: \"kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn\") pod \"barbican-6a43-account-create-dcnxj\" (UID: \"e888ae0e-fada-4076-9f60-c20eaf243332\") " pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.390886 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmlwn\" (UniqueName: \"kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn\") pod \"barbican-6a43-account-create-dcnxj\" (UID: \"e888ae0e-fada-4076-9f60-c20eaf243332\") " pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.395613 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:58:59 crc kubenswrapper[5081]: I1003 16:58:59.800743 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6a43-account-create-dcnxj"] Oct 03 16:59:00 crc kubenswrapper[5081]: I1003 16:59:00.080443 5081 generic.go:334] "Generic (PLEG): container finished" podID="e888ae0e-fada-4076-9f60-c20eaf243332" containerID="68ea8064f8b3f2007f1e817741ac213a8089a36cf9c798fd7fc871e1d8318c60" exitCode=0 Oct 03 16:59:00 crc kubenswrapper[5081]: I1003 16:59:00.080493 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a43-account-create-dcnxj" event={"ID":"e888ae0e-fada-4076-9f60-c20eaf243332","Type":"ContainerDied","Data":"68ea8064f8b3f2007f1e817741ac213a8089a36cf9c798fd7fc871e1d8318c60"} Oct 03 16:59:00 crc kubenswrapper[5081]: I1003 16:59:00.080750 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a43-account-create-dcnxj" event={"ID":"e888ae0e-fada-4076-9f60-c20eaf243332","Type":"ContainerStarted","Data":"ca9a7da28f06bd7aabf70b559339446efe5482a2de8df02bb740db7bfabcb667"} Oct 03 16:59:00 crc kubenswrapper[5081]: I1003 16:59:00.647152 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:59:00 crc kubenswrapper[5081]: I1003 16:59:00.647212 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:59:01 crc kubenswrapper[5081]: I1003 16:59:01.385585 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:59:01 crc kubenswrapper[5081]: I1003 16:59:01.504620 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmlwn\" (UniqueName: \"kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn\") pod \"e888ae0e-fada-4076-9f60-c20eaf243332\" (UID: \"e888ae0e-fada-4076-9f60-c20eaf243332\") " Oct 03 16:59:01 crc kubenswrapper[5081]: I1003 16:59:01.510261 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn" (OuterVolumeSpecName: "kube-api-access-nmlwn") pod "e888ae0e-fada-4076-9f60-c20eaf243332" (UID: "e888ae0e-fada-4076-9f60-c20eaf243332"). InnerVolumeSpecName "kube-api-access-nmlwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:01 crc kubenswrapper[5081]: I1003 16:59:01.606165 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmlwn\" (UniqueName: \"kubernetes.io/projected/e888ae0e-fada-4076-9f60-c20eaf243332-kube-api-access-nmlwn\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:02 crc kubenswrapper[5081]: I1003 16:59:02.103975 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a43-account-create-dcnxj" event={"ID":"e888ae0e-fada-4076-9f60-c20eaf243332","Type":"ContainerDied","Data":"ca9a7da28f06bd7aabf70b559339446efe5482a2de8df02bb740db7bfabcb667"} Oct 03 16:59:02 crc kubenswrapper[5081]: I1003 16:59:02.104027 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca9a7da28f06bd7aabf70b559339446efe5482a2de8df02bb740db7bfabcb667" Oct 03 16:59:02 crc kubenswrapper[5081]: I1003 16:59:02.104165 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6a43-account-create-dcnxj" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.287357 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-pws7r"] Oct 03 16:59:04 crc kubenswrapper[5081]: E1003 16:59:04.288373 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e888ae0e-fada-4076-9f60-c20eaf243332" containerName="mariadb-account-create" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.288393 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e888ae0e-fada-4076-9f60-c20eaf243332" containerName="mariadb-account-create" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.288658 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e888ae0e-fada-4076-9f60-c20eaf243332" containerName="mariadb-account-create" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.289454 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.291814 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2fhtg" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.292016 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.300064 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pws7r"] Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.449194 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.449271 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5vq5\" (UniqueName: \"kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.449973 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.552624 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.552774 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5vq5\" (UniqueName: \"kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.552885 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.561292 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.561427 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.583772 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5vq5\" (UniqueName: \"kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5\") pod \"barbican-db-sync-pws7r\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:04 crc kubenswrapper[5081]: I1003 16:59:04.611598 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:05 crc kubenswrapper[5081]: I1003 16:59:05.108327 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-pws7r"] Oct 03 16:59:05 crc kubenswrapper[5081]: I1003 16:59:05.128577 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pws7r" event={"ID":"10c245e9-2377-4ab8-9048-41bde14b0d68","Type":"ContainerStarted","Data":"5dc41eb1e6176b032c1fa159870e41b9a0560554dadf1994a493740e69988fac"} Oct 03 16:59:06 crc kubenswrapper[5081]: I1003 16:59:06.137331 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pws7r" event={"ID":"10c245e9-2377-4ab8-9048-41bde14b0d68","Type":"ContainerStarted","Data":"b99cb7ec76ba8a5eb7f9863072a3efe8a5184fbc204101282ac97f3a34c33d61"} Oct 03 16:59:06 crc kubenswrapper[5081]: I1003 16:59:06.153066 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-pws7r" podStartSLOduration=2.153046317 podStartE2EDuration="2.153046317s" podCreationTimestamp="2025-10-03 16:59:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:06.149579957 +0000 UTC m=+5465.115136590" watchObservedRunningTime="2025-10-03 16:59:06.153046317 +0000 UTC m=+5465.118602930" Oct 03 16:59:06 crc kubenswrapper[5081]: E1003 16:59:06.671691 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:07 crc kubenswrapper[5081]: I1003 16:59:07.146060 5081 generic.go:334] "Generic (PLEG): container finished" podID="10c245e9-2377-4ab8-9048-41bde14b0d68" containerID="b99cb7ec76ba8a5eb7f9863072a3efe8a5184fbc204101282ac97f3a34c33d61" exitCode=0 Oct 03 16:59:07 crc kubenswrapper[5081]: I1003 16:59:07.146120 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pws7r" event={"ID":"10c245e9-2377-4ab8-9048-41bde14b0d68","Type":"ContainerDied","Data":"b99cb7ec76ba8a5eb7f9863072a3efe8a5184fbc204101282ac97f3a34c33d61"} Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.439164 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.618345 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data\") pod \"10c245e9-2377-4ab8-9048-41bde14b0d68\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.618461 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle\") pod \"10c245e9-2377-4ab8-9048-41bde14b0d68\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.618521 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5vq5\" (UniqueName: \"kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5\") pod \"10c245e9-2377-4ab8-9048-41bde14b0d68\" (UID: \"10c245e9-2377-4ab8-9048-41bde14b0d68\") " Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.623238 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "10c245e9-2377-4ab8-9048-41bde14b0d68" (UID: "10c245e9-2377-4ab8-9048-41bde14b0d68"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.623544 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5" (OuterVolumeSpecName: "kube-api-access-n5vq5") pod "10c245e9-2377-4ab8-9048-41bde14b0d68" (UID: "10c245e9-2377-4ab8-9048-41bde14b0d68"). InnerVolumeSpecName "kube-api-access-n5vq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.639873 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10c245e9-2377-4ab8-9048-41bde14b0d68" (UID: "10c245e9-2377-4ab8-9048-41bde14b0d68"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.721032 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.721097 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5vq5\" (UniqueName: \"kubernetes.io/projected/10c245e9-2377-4ab8-9048-41bde14b0d68-kube-api-access-n5vq5\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:08 crc kubenswrapper[5081]: I1003 16:59:08.721109 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/10c245e9-2377-4ab8-9048-41bde14b0d68-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.162787 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-pws7r" event={"ID":"10c245e9-2377-4ab8-9048-41bde14b0d68","Type":"ContainerDied","Data":"5dc41eb1e6176b032c1fa159870e41b9a0560554dadf1994a493740e69988fac"} Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.162827 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dc41eb1e6176b032c1fa159870e41b9a0560554dadf1994a493740e69988fac" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.162858 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-pws7r" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.367143 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-df9f9c479-tccq5"] Oct 03 16:59:09 crc kubenswrapper[5081]: E1003 16:59:09.367579 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c245e9-2377-4ab8-9048-41bde14b0d68" containerName="barbican-db-sync" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.367596 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c245e9-2377-4ab8-9048-41bde14b0d68" containerName="barbican-db-sync" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.367804 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c245e9-2377-4ab8-9048-41bde14b0d68" containerName="barbican-db-sync" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.371874 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.376161 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.376342 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.376428 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2fhtg" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.381218 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-df9f9c479-tccq5"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.462500 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-78657f88c4-88v4m"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.463799 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.469276 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.478247 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-78657f88c4-88v4m"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541355 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-combined-ca-bundle\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541423 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-logs\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541464 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data-custom\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541496 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541519 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p24pn\" (UniqueName: \"kubernetes.io/projected/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-kube-api-access-p24pn\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.541695 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.543516 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.558387 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646420 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p24pn\" (UniqueName: \"kubernetes.io/projected/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-kube-api-access-p24pn\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646480 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpqnh\" (UniqueName: \"kubernetes.io/projected/da20472c-048f-4648-9bb5-fdee538607f4-kube-api-access-bpqnh\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646518 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646548 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646591 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-combined-ca-bundle\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646610 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646628 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646652 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da20472c-048f-4648-9bb5-fdee538607f4-logs\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " 
pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646681 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-combined-ca-bundle\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646698 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjc54\" (UniqueName: \"kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646716 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646744 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-logs\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data-custom\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646787 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data-custom\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.646806 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.652383 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-logs\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.659425 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data\") pod 
\"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.665315 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-combined-ca-bundle\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.686251 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-config-data-custom\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.699327 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p24pn\" (UniqueName: \"kubernetes.io/projected/03abedc4-2d10-4f99-bbe3-df6ddc2e853b-kube-api-access-p24pn\") pod \"barbican-worker-df9f9c479-tccq5\" (UID: \"03abedc4-2d10-4f99-bbe3-df6ddc2e853b\") " pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.703631 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-577f988d76-v8j4v"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.714006 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.718845 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.720047 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-df9f9c479-tccq5" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.727609 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-577f988d76-v8j4v"] Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747798 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da20472c-048f-4648-9bb5-fdee538607f4-logs\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747845 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747866 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjc54\" (UniqueName: \"kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747915 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data-custom\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747945 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpqnh\" (UniqueName: \"kubernetes.io/projected/da20472c-048f-4648-9bb5-fdee538607f4-kube-api-access-bpqnh\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.747978 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.748002 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.748026 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-combined-ca-bundle\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 
16:59:09.748045 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.748069 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.748640 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da20472c-048f-4648-9bb5-fdee538607f4-logs\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.751552 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.752077 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.752816 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.765001 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.775295 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.784235 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-combined-ca-bundle\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.788672 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da20472c-048f-4648-9bb5-fdee538607f4-config-data-custom\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.788948 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpqnh\" (UniqueName: \"kubernetes.io/projected/da20472c-048f-4648-9bb5-fdee538607f4-kube-api-access-bpqnh\") pod \"barbican-keystone-listener-78657f88c4-88v4m\" (UID: \"da20472c-048f-4648-9bb5-fdee538607f4\") " pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.797758 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjc54\" (UniqueName: \"kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54\") pod \"dnsmasq-dns-6f7b6f7cd7-l49t9\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") " pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.799955 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.852089 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-combined-ca-bundle\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.852141 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.852167 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data-custom\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.852326 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff3fb265-cd51-4e41-823f-c0aa9bea922b-logs\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.852403 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g79ts\" (UniqueName: \"kubernetes.io/projected/ff3fb265-cd51-4e41-823f-c0aa9bea922b-kube-api-access-g79ts\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.871386 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.961815 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g79ts\" (UniqueName: \"kubernetes.io/projected/ff3fb265-cd51-4e41-823f-c0aa9bea922b-kube-api-access-g79ts\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.962339 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-combined-ca-bundle\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.962404 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.962438 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data-custom\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.962506 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff3fb265-cd51-4e41-823f-c0aa9bea922b-logs\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.979607 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-combined-ca-bundle\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:09 crc kubenswrapper[5081]: I1003 16:59:09.992671 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff3fb265-cd51-4e41-823f-c0aa9bea922b-logs\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:09.999115 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g79ts\" (UniqueName: \"kubernetes.io/projected/ff3fb265-cd51-4e41-823f-c0aa9bea922b-kube-api-access-g79ts\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.001149 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data-custom\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " 
pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.003023 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3fb265-cd51-4e41-823f-c0aa9bea922b-config-data\") pod \"barbican-api-577f988d76-v8j4v\" (UID: \"ff3fb265-cd51-4e41-823f-c0aa9bea922b\") " pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.229317 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.241630 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-df9f9c479-tccq5"] Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.332977 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-78657f88c4-88v4m"] Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.411113 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"] Oct 03 16:59:10 crc kubenswrapper[5081]: I1003 16:59:10.705315 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-577f988d76-v8j4v"] Oct 03 16:59:10 crc kubenswrapper[5081]: W1003 16:59:10.721279 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff3fb265_cd51_4e41_823f_c0aa9bea922b.slice/crio-ee5aa81d7a6dc111e1091f01eb8159b4c0fe67b81c8079dd7ead7b89934bc862 WatchSource:0}: Error finding container ee5aa81d7a6dc111e1091f01eb8159b4c0fe67b81c8079dd7ead7b89934bc862: Status 404 returned error can't find the container with id ee5aa81d7a6dc111e1091f01eb8159b4c0fe67b81c8079dd7ead7b89934bc862 Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.182468 5081 generic.go:334] "Generic (PLEG): container finished" podID="70568631-364c-432d-ac9f-4c6d2a843b83" containerID="04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3" exitCode=0 Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.182505 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" event={"ID":"70568631-364c-432d-ac9f-4c6d2a843b83","Type":"ContainerDied","Data":"04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.182910 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" event={"ID":"70568631-364c-432d-ac9f-4c6d2a843b83","Type":"ContainerStarted","Data":"88c09840d2d97029781ab1850c65e4cba9477827898243ad1df17f5718430bb8"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.185667 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" event={"ID":"da20472c-048f-4648-9bb5-fdee538607f4","Type":"ContainerStarted","Data":"3dfa88598381a00be2adfd4bc4c5426772d53ea8d254b7633e6abc78882688ab"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.185700 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" event={"ID":"da20472c-048f-4648-9bb5-fdee538607f4","Type":"ContainerStarted","Data":"ecc2489d35051145e96ef1e56cb6b6c1eda4442dfa92444d6503b85fa7dc9f31"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.185716 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" 
event={"ID":"da20472c-048f-4648-9bb5-fdee538607f4","Type":"ContainerStarted","Data":"67bf84b2efc2e8816a68c6558fb7850c5ce06a08bface36a19551d892f792772"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.191423 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-df9f9c479-tccq5" event={"ID":"03abedc4-2d10-4f99-bbe3-df6ddc2e853b","Type":"ContainerStarted","Data":"78b771fb6f39d666350cef43c3820a95ed0338fc8adc6b02d0e7634dabdf6ceb"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.191780 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-df9f9c479-tccq5" event={"ID":"03abedc4-2d10-4f99-bbe3-df6ddc2e853b","Type":"ContainerStarted","Data":"0dd9f314af97616178f8bd29027083029d943f62af2d0c30b9530578e276e675"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.191936 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-df9f9c479-tccq5" event={"ID":"03abedc4-2d10-4f99-bbe3-df6ddc2e853b","Type":"ContainerStarted","Data":"35c4d529e5898c70ce1fb2ede85dfa511295b8d38571c6a63a9250175c851404"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.196476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577f988d76-v8j4v" event={"ID":"ff3fb265-cd51-4e41-823f-c0aa9bea922b","Type":"ContainerStarted","Data":"6720425e6232294c804bb6f90c544674802722ef3d4d206a9fb5e314bd400275"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.197178 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577f988d76-v8j4v" event={"ID":"ff3fb265-cd51-4e41-823f-c0aa9bea922b","Type":"ContainerStarted","Data":"46dd099800be92fc64c094acd372926e902eea540cb681da5762bc7da2d43e2a"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.197233 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577f988d76-v8j4v" event={"ID":"ff3fb265-cd51-4e41-823f-c0aa9bea922b","Type":"ContainerStarted","Data":"ee5aa81d7a6dc111e1091f01eb8159b4c0fe67b81c8079dd7ead7b89934bc862"} Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.197280 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.197305 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.231076 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-df9f9c479-tccq5" podStartSLOduration=2.231055947 podStartE2EDuration="2.231055947s" podCreationTimestamp="2025-10-03 16:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:11.223942122 +0000 UTC m=+5470.189498755" watchObservedRunningTime="2025-10-03 16:59:11.231055947 +0000 UTC m=+5470.196612570" Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.256810 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-78657f88c4-88v4m" podStartSLOduration=2.256782057 podStartE2EDuration="2.256782057s" podCreationTimestamp="2025-10-03 16:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:11.245278396 +0000 UTC m=+5470.210835009" watchObservedRunningTime="2025-10-03 16:59:11.256782057 +0000 UTC 
m=+5470.222338680" Oct 03 16:59:11 crc kubenswrapper[5081]: I1003 16:59:11.270254 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-577f988d76-v8j4v" podStartSLOduration=2.270238614 podStartE2EDuration="2.270238614s" podCreationTimestamp="2025-10-03 16:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:11.265978541 +0000 UTC m=+5470.231535154" watchObservedRunningTime="2025-10-03 16:59:11.270238614 +0000 UTC m=+5470.235795227" Oct 03 16:59:12 crc kubenswrapper[5081]: I1003 16:59:12.212123 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" event={"ID":"70568631-364c-432d-ac9f-4c6d2a843b83","Type":"ContainerStarted","Data":"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"} Oct 03 16:59:12 crc kubenswrapper[5081]: I1003 16:59:12.231488 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" podStartSLOduration=3.231472856 podStartE2EDuration="3.231472856s" podCreationTimestamp="2025-10-03 16:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:12.227494372 +0000 UTC m=+5471.193051005" watchObservedRunningTime="2025-10-03 16:59:12.231472856 +0000 UTC m=+5471.197029469" Oct 03 16:59:13 crc kubenswrapper[5081]: I1003 16:59:13.220136 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:16 crc kubenswrapper[5081]: I1003 16:59:16.730122 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:16 crc kubenswrapper[5081]: E1003 16:59:16.901071 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:18 crc kubenswrapper[5081]: I1003 16:59:18.218295 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-577f988d76-v8j4v" Oct 03 16:59:19 crc kubenswrapper[5081]: I1003 16:59:19.873680 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" Oct 03 16:59:19 crc kubenswrapper[5081]: I1003 16:59:19.940832 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:59:19 crc kubenswrapper[5081]: I1003 16:59:19.941830 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="dnsmasq-dns" containerID="cri-o://eac74e8b6166373f528c8a194645c2208324c2e3868c2e168982a08455de07e7" gracePeriod=10 Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.291259 5081 generic.go:334] "Generic (PLEG): container finished" podID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerID="eac74e8b6166373f528c8a194645c2208324c2e3868c2e168982a08455de07e7" exitCode=0 Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.291446 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" 
event={"ID":"3cf53967-2da7-4356-a07a-8c7eaf0ff76a","Type":"ContainerDied","Data":"eac74e8b6166373f528c8a194645c2208324c2e3868c2e168982a08455de07e7"} Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.630267 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.749587 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc\") pod \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.749780 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb\") pod \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.749832 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config\") pod \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.749866 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-984df\" (UniqueName: \"kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df\") pod \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.749908 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb\") pod \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\" (UID: \"3cf53967-2da7-4356-a07a-8c7eaf0ff76a\") " Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.760506 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df" (OuterVolumeSpecName: "kube-api-access-984df") pod "3cf53967-2da7-4356-a07a-8c7eaf0ff76a" (UID: "3cf53967-2da7-4356-a07a-8c7eaf0ff76a"). InnerVolumeSpecName "kube-api-access-984df". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.799603 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config" (OuterVolumeSpecName: "config") pod "3cf53967-2da7-4356-a07a-8c7eaf0ff76a" (UID: "3cf53967-2da7-4356-a07a-8c7eaf0ff76a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.801038 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3cf53967-2da7-4356-a07a-8c7eaf0ff76a" (UID: "3cf53967-2da7-4356-a07a-8c7eaf0ff76a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.801404 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3cf53967-2da7-4356-a07a-8c7eaf0ff76a" (UID: "3cf53967-2da7-4356-a07a-8c7eaf0ff76a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.807881 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3cf53967-2da7-4356-a07a-8c7eaf0ff76a" (UID: "3cf53967-2da7-4356-a07a-8c7eaf0ff76a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.851490 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.851523 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.851535 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.851543 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-config\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:20 crc kubenswrapper[5081]: I1003 16:59:20.851551 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-984df\" (UniqueName: \"kubernetes.io/projected/3cf53967-2da7-4356-a07a-8c7eaf0ff76a-kube-api-access-984df\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.300037 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" event={"ID":"3cf53967-2da7-4356-a07a-8c7eaf0ff76a","Type":"ContainerDied","Data":"b0ac78504564744247d6da2ad0eba0f4e34f27bb1fec67973a4890f0fcd5ca0f"} Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.300114 5081 scope.go:117] "RemoveContainer" containerID="eac74e8b6166373f528c8a194645c2208324c2e3868c2e168982a08455de07e7" Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.300131 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f6bf76467-6j7lf" Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.319205 5081 scope.go:117] "RemoveContainer" containerID="20ca9827a7789a19019c3bde70031053b75be684e6d3d18672b1e1e6cd9a0b1a" Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.342796 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.349594 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f6bf76467-6j7lf"] Oct 03 16:59:21 crc kubenswrapper[5081]: I1003 16:59:21.840518 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" path="/var/lib/kubelet/pods/3cf53967-2da7-4356-a07a-8c7eaf0ff76a/volumes" Oct 03 16:59:27 crc kubenswrapper[5081]: E1003 16:59:27.143828 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:30 crc kubenswrapper[5081]: I1003 16:59:30.647666 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 16:59:30 crc kubenswrapper[5081]: I1003 16:59:30.648873 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.423929 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-25czj"] Oct 03 16:59:32 crc kubenswrapper[5081]: E1003 16:59:32.425136 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="dnsmasq-dns" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.425153 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="dnsmasq-dns" Oct 03 16:59:32 crc kubenswrapper[5081]: E1003 16:59:32.425188 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="init" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.425196 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="init" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.425756 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cf53967-2da7-4356-a07a-8c7eaf0ff76a" containerName="dnsmasq-dns" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.446815 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-25czj" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.453080 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-25czj"] Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.553117 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7xp4\" (UniqueName: \"kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4\") pod \"neutron-db-create-25czj\" (UID: \"2f193389-e587-4337-ae08-a3ec10e49ff6\") " pod="openstack/neutron-db-create-25czj" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.655144 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7xp4\" (UniqueName: \"kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4\") pod \"neutron-db-create-25czj\" (UID: \"2f193389-e587-4337-ae08-a3ec10e49ff6\") " pod="openstack/neutron-db-create-25czj" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.673235 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7xp4\" (UniqueName: \"kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4\") pod \"neutron-db-create-25czj\" (UID: \"2f193389-e587-4337-ae08-a3ec10e49ff6\") " pod="openstack/neutron-db-create-25czj" Oct 03 16:59:32 crc kubenswrapper[5081]: I1003 16:59:32.770023 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-25czj" Oct 03 16:59:33 crc kubenswrapper[5081]: I1003 16:59:33.201638 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-25czj"] Oct 03 16:59:33 crc kubenswrapper[5081]: I1003 16:59:33.457021 5081 generic.go:334] "Generic (PLEG): container finished" podID="2f193389-e587-4337-ae08-a3ec10e49ff6" containerID="3f2dd09e49fc5b94d12155a63b28164fc2c7ea42db1878861b3709bdf466eb9b" exitCode=0 Oct 03 16:59:33 crc kubenswrapper[5081]: I1003 16:59:33.457074 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-25czj" event={"ID":"2f193389-e587-4337-ae08-a3ec10e49ff6","Type":"ContainerDied","Data":"3f2dd09e49fc5b94d12155a63b28164fc2c7ea42db1878861b3709bdf466eb9b"} Oct 03 16:59:33 crc kubenswrapper[5081]: I1003 16:59:33.457323 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-25czj" event={"ID":"2f193389-e587-4337-ae08-a3ec10e49ff6","Type":"ContainerStarted","Data":"205e5faa012dca9cf77ac7a03426236fefc2eff47ac7af5ee59acdf140b91da9"} Oct 03 16:59:34 crc kubenswrapper[5081]: I1003 16:59:34.761734 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-25czj" Oct 03 16:59:34 crc kubenswrapper[5081]: I1003 16:59:34.892259 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7xp4\" (UniqueName: \"kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4\") pod \"2f193389-e587-4337-ae08-a3ec10e49ff6\" (UID: \"2f193389-e587-4337-ae08-a3ec10e49ff6\") " Oct 03 16:59:34 crc kubenswrapper[5081]: I1003 16:59:34.896784 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4" (OuterVolumeSpecName: "kube-api-access-x7xp4") pod "2f193389-e587-4337-ae08-a3ec10e49ff6" (UID: "2f193389-e587-4337-ae08-a3ec10e49ff6"). 
InnerVolumeSpecName "kube-api-access-x7xp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:34 crc kubenswrapper[5081]: I1003 16:59:34.994140 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7xp4\" (UniqueName: \"kubernetes.io/projected/2f193389-e587-4337-ae08-a3ec10e49ff6-kube-api-access-x7xp4\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:35 crc kubenswrapper[5081]: I1003 16:59:35.472531 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-25czj" event={"ID":"2f193389-e587-4337-ae08-a3ec10e49ff6","Type":"ContainerDied","Data":"205e5faa012dca9cf77ac7a03426236fefc2eff47ac7af5ee59acdf140b91da9"} Oct 03 16:59:35 crc kubenswrapper[5081]: I1003 16:59:35.472826 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="205e5faa012dca9cf77ac7a03426236fefc2eff47ac7af5ee59acdf140b91da9" Oct 03 16:59:35 crc kubenswrapper[5081]: I1003 16:59:35.472591 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-25czj" Oct 03 16:59:37 crc kubenswrapper[5081]: E1003 16:59:37.340775 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.497943 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7194-account-create-5pft9"] Oct 03 16:59:42 crc kubenswrapper[5081]: E1003 16:59:42.498816 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f193389-e587-4337-ae08-a3ec10e49ff6" containerName="mariadb-database-create" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.498831 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f193389-e587-4337-ae08-a3ec10e49ff6" containerName="mariadb-database-create" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.499011 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f193389-e587-4337-ae08-a3ec10e49ff6" containerName="mariadb-database-create" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.499724 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.501645 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.507916 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7194-account-create-5pft9"] Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.617456 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cbzg\" (UniqueName: \"kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg\") pod \"neutron-7194-account-create-5pft9\" (UID: \"92f050c3-8dc0-46b8-b399-a84d91b31398\") " pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.719175 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cbzg\" (UniqueName: \"kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg\") pod \"neutron-7194-account-create-5pft9\" (UID: \"92f050c3-8dc0-46b8-b399-a84d91b31398\") " pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.736779 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cbzg\" (UniqueName: \"kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg\") pod \"neutron-7194-account-create-5pft9\" (UID: \"92f050c3-8dc0-46b8-b399-a84d91b31398\") " pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:42 crc kubenswrapper[5081]: I1003 16:59:42.819674 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:43 crc kubenswrapper[5081]: I1003 16:59:43.267010 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7194-account-create-5pft9"] Oct 03 16:59:43 crc kubenswrapper[5081]: I1003 16:59:43.550238 5081 generic.go:334] "Generic (PLEG): container finished" podID="92f050c3-8dc0-46b8-b399-a84d91b31398" containerID="ce9b86c284fcfd2f798dcab1b7c5d0e54151ae875251be0230d8c0c496ad2330" exitCode=0 Oct 03 16:59:43 crc kubenswrapper[5081]: I1003 16:59:43.550283 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7194-account-create-5pft9" event={"ID":"92f050c3-8dc0-46b8-b399-a84d91b31398","Type":"ContainerDied","Data":"ce9b86c284fcfd2f798dcab1b7c5d0e54151ae875251be0230d8c0c496ad2330"} Oct 03 16:59:43 crc kubenswrapper[5081]: I1003 16:59:43.550329 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7194-account-create-5pft9" event={"ID":"92f050c3-8dc0-46b8-b399-a84d91b31398","Type":"ContainerStarted","Data":"2711603241ae722153f7b571bc16351aa54e69e52f95479ec855a3d73cdf60ba"} Oct 03 16:59:44 crc kubenswrapper[5081]: I1003 16:59:44.891393 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.061577 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cbzg\" (UniqueName: \"kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg\") pod \"92f050c3-8dc0-46b8-b399-a84d91b31398\" (UID: \"92f050c3-8dc0-46b8-b399-a84d91b31398\") " Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.073107 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg" (OuterVolumeSpecName: "kube-api-access-2cbzg") pod "92f050c3-8dc0-46b8-b399-a84d91b31398" (UID: "92f050c3-8dc0-46b8-b399-a84d91b31398"). InnerVolumeSpecName "kube-api-access-2cbzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.163254 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cbzg\" (UniqueName: \"kubernetes.io/projected/92f050c3-8dc0-46b8-b399-a84d91b31398-kube-api-access-2cbzg\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.567358 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7194-account-create-5pft9" event={"ID":"92f050c3-8dc0-46b8-b399-a84d91b31398","Type":"ContainerDied","Data":"2711603241ae722153f7b571bc16351aa54e69e52f95479ec855a3d73cdf60ba"} Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.567431 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2711603241ae722153f7b571bc16351aa54e69e52f95479ec855a3d73cdf60ba" Oct 03 16:59:45 crc kubenswrapper[5081]: I1003 16:59:45.567449 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7194-account-create-5pft9" Oct 03 16:59:47 crc kubenswrapper[5081]: E1003 16:59:47.552185 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.638492 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-zrzdr"] Oct 03 16:59:47 crc kubenswrapper[5081]: E1003 16:59:47.639183 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f050c3-8dc0-46b8-b399-a84d91b31398" containerName="mariadb-account-create" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.639262 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f050c3-8dc0-46b8-b399-a84d91b31398" containerName="mariadb-account-create" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.639491 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f050c3-8dc0-46b8-b399-a84d91b31398" containerName="mariadb-account-create" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.640223 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.645455 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.645701 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.645840 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hvfnw" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.649910 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zrzdr"] Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.706483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsfh5\" (UniqueName: \"kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.706858 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.706991 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.808722 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsfh5\" (UniqueName: \"kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.808789 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.808811 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.815463 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.822392 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.831925 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsfh5\" (UniqueName: \"kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5\") pod \"neutron-db-sync-zrzdr\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:47 crc kubenswrapper[5081]: I1003 16:59:47.968034 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:48 crc kubenswrapper[5081]: I1003 16:59:48.397827 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zrzdr"] Oct 03 16:59:48 crc kubenswrapper[5081]: W1003 16:59:48.403017 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0adfd05f_b2bb_4e10_b343_54fb084d6e73.slice/crio-8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015 WatchSource:0}: Error finding container 8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015: Status 404 returned error can't find the container with id 8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015 Oct 03 16:59:48 crc kubenswrapper[5081]: I1003 16:59:48.591082 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zrzdr" event={"ID":"0adfd05f-b2bb-4e10-b343-54fb084d6e73","Type":"ContainerStarted","Data":"cf2587229e62133cd001c332432fccd4d5f671c55b45a6302353e75452b7ee4f"} Oct 03 16:59:48 crc kubenswrapper[5081]: I1003 16:59:48.591334 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zrzdr" event={"ID":"0adfd05f-b2bb-4e10-b343-54fb084d6e73","Type":"ContainerStarted","Data":"8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015"} Oct 03 16:59:48 crc kubenswrapper[5081]: I1003 16:59:48.606886 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-zrzdr" podStartSLOduration=1.606862512 podStartE2EDuration="1.606862512s" podCreationTimestamp="2025-10-03 16:59:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:48.604970457 +0000 UTC m=+5507.570527070" watchObservedRunningTime="2025-10-03 16:59:48.606862512 +0000 UTC m=+5507.572419125" Oct 03 16:59:53 crc kubenswrapper[5081]: I1003 16:59:53.628631 5081 generic.go:334] "Generic (PLEG): container finished" podID="0adfd05f-b2bb-4e10-b343-54fb084d6e73" containerID="cf2587229e62133cd001c332432fccd4d5f671c55b45a6302353e75452b7ee4f" exitCode=0 Oct 03 16:59:53 crc kubenswrapper[5081]: I1003 16:59:53.628743 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zrzdr" event={"ID":"0adfd05f-b2bb-4e10-b343-54fb084d6e73","Type":"ContainerDied","Data":"cf2587229e62133cd001c332432fccd4d5f671c55b45a6302353e75452b7ee4f"} Oct 03 16:59:54 crc kubenswrapper[5081]: I1003 16:59:54.927227 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.019628 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle\") pod \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.019686 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsfh5\" (UniqueName: \"kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5\") pod \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.019771 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config\") pod \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\" (UID: \"0adfd05f-b2bb-4e10-b343-54fb084d6e73\") " Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.025291 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5" (OuterVolumeSpecName: "kube-api-access-tsfh5") pod "0adfd05f-b2bb-4e10-b343-54fb084d6e73" (UID: "0adfd05f-b2bb-4e10-b343-54fb084d6e73"). InnerVolumeSpecName "kube-api-access-tsfh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.042159 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0adfd05f-b2bb-4e10-b343-54fb084d6e73" (UID: "0adfd05f-b2bb-4e10-b343-54fb084d6e73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.044537 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config" (OuterVolumeSpecName: "config") pod "0adfd05f-b2bb-4e10-b343-54fb084d6e73" (UID: "0adfd05f-b2bb-4e10-b343-54fb084d6e73"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.121309 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.121345 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsfh5\" (UniqueName: \"kubernetes.io/projected/0adfd05f-b2bb-4e10-b343-54fb084d6e73-kube-api-access-tsfh5\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.121357 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/0adfd05f-b2bb-4e10-b343-54fb084d6e73-config\") on node \"crc\" DevicePath \"\"" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.646148 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zrzdr" event={"ID":"0adfd05f-b2bb-4e10-b343-54fb084d6e73","Type":"ContainerDied","Data":"8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015"} Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.646187 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f3efe7dc61ec0ef56ef6ec581aa8888121e95c4dd28a8ac7ad9683263e7a015" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.646220 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zrzdr" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.872586 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"] Oct 03 16:59:55 crc kubenswrapper[5081]: E1003 16:59:55.873517 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0adfd05f-b2bb-4e10-b343-54fb084d6e73" containerName="neutron-db-sync" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.873540 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0adfd05f-b2bb-4e10-b343-54fb084d6e73" containerName="neutron-db-sync" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.873881 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0adfd05f-b2bb-4e10-b343-54fb084d6e73" containerName="neutron-db-sync" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.877759 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:55 crc kubenswrapper[5081]: I1003 16:59:55.887530 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"] Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.025463 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-66bff68f7c-cm8v6"] Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.027231 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.030033 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.030143 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.030038 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hvfnw" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.035127 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.035212 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.035264 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhrht\" (UniqueName: \"kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.035295 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.035381 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.050441 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66bff68f7c-cm8v6"] Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.137315 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.137396 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc 
kubenswrapper[5081]: I1003 16:59:56.137448 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.137489 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhrht\" (UniqueName: \"kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.137530 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.138285 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9hkr\" (UniqueName: \"kubernetes.io/projected/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-kube-api-access-f9hkr\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.138335 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-httpd-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.138389 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.138504 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-combined-ca-bundle\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.139128 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.139154 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.139321 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.139769 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.155501 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhrht\" (UniqueName: \"kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht\") pod \"dnsmasq-dns-5f68ccf757-t6khk\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") " pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.196005 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.240447 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-combined-ca-bundle\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.241449 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.241513 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-httpd-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.241530 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9hkr\" (UniqueName: \"kubernetes.io/projected/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-kube-api-access-f9hkr\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.246495 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-combined-ca-bundle\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.246552 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-httpd-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc 
kubenswrapper[5081]: I1003 16:59:56.260219 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9hkr\" (UniqueName: \"kubernetes.io/projected/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-kube-api-access-f9hkr\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.262421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3cbb4cae-c663-4fbe-91b1-3bdc89f48b83-config\") pod \"neutron-66bff68f7c-cm8v6\" (UID: \"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83\") " pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.349553 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.659283 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"] Oct 03 16:59:56 crc kubenswrapper[5081]: I1003 16:59:56.889035 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66bff68f7c-cm8v6"] Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.668627 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66bff68f7c-cm8v6" event={"ID":"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83","Type":"ContainerStarted","Data":"3ed76b9fdf3d33ff6b13bd5fd161fed156dbbc98f6bc92dcd06153f6087bb8b5"} Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.669116 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66bff68f7c-cm8v6" event={"ID":"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83","Type":"ContainerStarted","Data":"a81d1d34f454e956a091ebd8df39a172729301cb4ed63991bb3ab5f044106b0d"} Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.669135 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66bff68f7c-cm8v6" event={"ID":"3cbb4cae-c663-4fbe-91b1-3bdc89f48b83","Type":"ContainerStarted","Data":"38890a59737b90eee294d3ed44005df1d15be48f7126a5e8c1a9ea8b69e09fde"} Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.669220 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-66bff68f7c-cm8v6" Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.671513 5081 generic.go:334] "Generic (PLEG): container finished" podID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerID="20bd3746003b8df5c92ec1f57bd7cc70c364306450c882c64283bf684bb2d5c0" exitCode=0 Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.671540 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" event={"ID":"71f04f9c-87b6-49ea-bd69-fc055af47341","Type":"ContainerDied","Data":"20bd3746003b8df5c92ec1f57bd7cc70c364306450c882c64283bf684bb2d5c0"} Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.671726 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" event={"ID":"71f04f9c-87b6-49ea-bd69-fc055af47341","Type":"ContainerStarted","Data":"dd86f5e1c05549fb63a00a6bd71b45252ef5a0b426da3b0c34d852ca6588a908"} Oct 03 16:59:57 crc kubenswrapper[5081]: I1003 16:59:57.700315 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-66bff68f7c-cm8v6" podStartSLOduration=2.700292713 podStartE2EDuration="2.700292713s" podCreationTimestamp="2025-10-03 16:59:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:57.696081032 +0000 UTC m=+5516.661637655" watchObservedRunningTime="2025-10-03 16:59:57.700292713 +0000 UTC m=+5516.665849326" Oct 03 16:59:57 crc kubenswrapper[5081]: E1003 16:59:57.802001 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode888ae0e_fada_4076_9f60_c20eaf243332.slice\": RecentStats: unable to find data in memory cache]" Oct 03 16:59:58 crc kubenswrapper[5081]: I1003 16:59:58.709907 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" event={"ID":"71f04f9c-87b6-49ea-bd69-fc055af47341","Type":"ContainerStarted","Data":"5f2ff17e5d38c501d6ffc163fbae0338020c6ccc012deb306f3901dbcae315e4"} Oct 03 16:59:58 crc kubenswrapper[5081]: I1003 16:59:58.711686 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" Oct 03 16:59:58 crc kubenswrapper[5081]: I1003 16:59:58.735601 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" podStartSLOduration=3.735584696 podStartE2EDuration="3.735584696s" podCreationTimestamp="2025-10-03 16:59:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 16:59:58.728589354 +0000 UTC m=+5517.694145977" watchObservedRunningTime="2025-10-03 16:59:58.735584696 +0000 UTC m=+5517.701141299" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.143694 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5"] Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.145403 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.147698 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.148238 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.152548 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5"] Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.207452 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.207507 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xpgs\" (UniqueName: \"kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.207792 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.309847 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.309900 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xpgs\" (UniqueName: \"kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.309986 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.310881 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume\") pod 
\"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.315912 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.330264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xpgs\" (UniqueName: \"kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs\") pod \"collect-profiles-29325180-dshw5\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.464767 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.585274 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"] Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.587129 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.605936 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"] Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.618117 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m268z\" (UniqueName: \"kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.618308 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.618373 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.647156 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.647526 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.647623 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.651943 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.652012 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41" gracePeriod=600 Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.721539 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.721629 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.721761 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m268z\" (UniqueName: \"kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.723027 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.723389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.746676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m268z\" (UniqueName: \"kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z\") pod \"redhat-marketplace-vhtst\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") " 
pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.907307 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhtst" Oct 03 17:00:00 crc kubenswrapper[5081]: I1003 17:00:00.969931 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5"] Oct 03 17:00:00 crc kubenswrapper[5081]: W1003 17:00:00.985591 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fcb380a_2041_4c85_8746_9d5755da5e86.slice/crio-2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011 WatchSource:0}: Error finding container 2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011: Status 404 returned error can't find the container with id 2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011 Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.370783 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"] Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.742960 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41" exitCode=0 Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.743032 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41"} Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.743062 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"} Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.743079 5081 scope.go:117] "RemoveContainer" containerID="e665ea07af3066626ed373cda66686b900bfcbbee0507b2e84050fb4c6e0beb9" Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.746552 5081 generic.go:334] "Generic (PLEG): container finished" podID="7fcb380a-2041-4c85-8746-9d5755da5e86" containerID="1f06df2ff0a4510184ceeb1e5cf00f3a466b8cb645f956dc10a151150e191a27" exitCode=0 Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.746630 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" event={"ID":"7fcb380a-2041-4c85-8746-9d5755da5e86","Type":"ContainerDied","Data":"1f06df2ff0a4510184ceeb1e5cf00f3a466b8cb645f956dc10a151150e191a27"} Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.746706 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" event={"ID":"7fcb380a-2041-4c85-8746-9d5755da5e86","Type":"ContainerStarted","Data":"2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011"} Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.748299 5081 generic.go:334] "Generic (PLEG): container finished" podID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerID="34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df" exitCode=0 Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.748336 5081 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerDied","Data":"34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df"} Oct 03 17:00:01 crc kubenswrapper[5081]: I1003 17:00:01.748361 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerStarted","Data":"d1cf2d05d1e1da4f2d027be90a5cdda0919eb1cdf90e1e60fc6db48bb435b13f"} Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.103534 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.260141 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume\") pod \"7fcb380a-2041-4c85-8746-9d5755da5e86\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.260807 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume" (OuterVolumeSpecName: "config-volume") pod "7fcb380a-2041-4c85-8746-9d5755da5e86" (UID: "7fcb380a-2041-4c85-8746-9d5755da5e86"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.261467 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume\") pod \"7fcb380a-2041-4c85-8746-9d5755da5e86\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.261661 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xpgs\" (UniqueName: \"kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs\") pod \"7fcb380a-2041-4c85-8746-9d5755da5e86\" (UID: \"7fcb380a-2041-4c85-8746-9d5755da5e86\") " Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.262218 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7fcb380a-2041-4c85-8746-9d5755da5e86-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.266573 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7fcb380a-2041-4c85-8746-9d5755da5e86" (UID: "7fcb380a-2041-4c85-8746-9d5755da5e86"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.266970 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs" (OuterVolumeSpecName: "kube-api-access-5xpgs") pod "7fcb380a-2041-4c85-8746-9d5755da5e86" (UID: "7fcb380a-2041-4c85-8746-9d5755da5e86"). InnerVolumeSpecName "kube-api-access-5xpgs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.363926 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7fcb380a-2041-4c85-8746-9d5755da5e86-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.364234 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xpgs\" (UniqueName: \"kubernetes.io/projected/7fcb380a-2041-4c85-8746-9d5755da5e86-kube-api-access-5xpgs\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.770629 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" event={"ID":"7fcb380a-2041-4c85-8746-9d5755da5e86","Type":"ContainerDied","Data":"2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011"} Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.770634 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.770667 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dc9009b6b50892f9d77d11601d5bd8151180993dfcac3fab5c237618292c011" Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.772594 5081 generic.go:334] "Generic (PLEG): container finished" podID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerID="3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f" exitCode=0 Oct 03 17:00:03 crc kubenswrapper[5081]: I1003 17:00:03.772633 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerDied","Data":"3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f"} Oct 03 17:00:04 crc kubenswrapper[5081]: I1003 17:00:04.181361 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx"] Oct 03 17:00:04 crc kubenswrapper[5081]: I1003 17:00:04.187515 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325135-xdsgx"] Oct 03 17:00:05 crc kubenswrapper[5081]: I1003 17:00:05.790840 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerStarted","Data":"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"} Oct 03 17:00:05 crc kubenswrapper[5081]: I1003 17:00:05.815656 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vhtst" podStartSLOduration=2.783540774 podStartE2EDuration="5.815635068s" podCreationTimestamp="2025-10-03 17:00:00 +0000 UTC" firstStartedPulling="2025-10-03 17:00:01.749764335 +0000 UTC m=+5520.715320948" lastFinishedPulling="2025-10-03 17:00:04.781858609 +0000 UTC m=+5523.747415242" observedRunningTime="2025-10-03 17:00:05.806458244 +0000 UTC m=+5524.772014857" watchObservedRunningTime="2025-10-03 17:00:05.815635068 +0000 UTC m=+5524.781191681" Oct 03 17:00:05 crc kubenswrapper[5081]: I1003 17:00:05.838114 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c534ff6f-e775-430d-a9e8-9696361bb3f9" path="/var/lib/kubelet/pods/c534ff6f-e775-430d-a9e8-9696361bb3f9/volumes" 
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.197836 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.243601 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"]
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.243871 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="dnsmasq-dns" containerID="cri-o://6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2" gracePeriod=10
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.736912 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.800469 5081 generic.go:334] "Generic (PLEG): container finished" podID="70568631-364c-432d-ac9f-4c6d2a843b83" containerID="6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2" exitCode=0
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.800521 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" event={"ID":"70568631-364c-432d-ac9f-4c6d2a843b83","Type":"ContainerDied","Data":"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"}
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.800566 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.800579 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f7b6f7cd7-l49t9" event={"ID":"70568631-364c-432d-ac9f-4c6d2a843b83","Type":"ContainerDied","Data":"88c09840d2d97029781ab1850c65e4cba9477827898243ad1df17f5718430bb8"}
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.800604 5081 scope.go:117] "RemoveContainer" containerID="6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.824019 5081 scope.go:117] "RemoveContainer" containerID="04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.848855 5081 scope.go:117] "RemoveContainer" containerID="6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"
Oct 03 17:00:06 crc kubenswrapper[5081]: E1003 17:00:06.849408 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2\": container with ID starting with 6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2 not found: ID does not exist" containerID="6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.849469 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2"} err="failed to get container status \"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2\": rpc error: code = NotFound desc = could not find container \"6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2\": container with ID starting with 6c19875e0daf755b0a1c24e83ff812f3ef0334c33ce48f98aacdc14684588bd2 not found: ID does not exist"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.849506 5081 scope.go:117] "RemoveContainer" containerID="04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3"
Oct 03 17:00:06 crc kubenswrapper[5081]: E1003 17:00:06.850624 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3\": container with ID starting with 04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3 not found: ID does not exist" containerID="04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.850710 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3"} err="failed to get container status \"04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3\": rpc error: code = NotFound desc = could not find container \"04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3\": container with ID starting with 04bb537f8f1561523fe68022812272082a9a892fb93a3e5da4c7cdb6c20ee7d3 not found: ID does not exist"
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.926476 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb\") pod \"70568631-364c-432d-ac9f-4c6d2a843b83\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") "
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.926732 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config\") pod \"70568631-364c-432d-ac9f-4c6d2a843b83\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") "
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.926756 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb\") pod \"70568631-364c-432d-ac9f-4c6d2a843b83\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") "
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.926827 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjc54\" (UniqueName: \"kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54\") pod \"70568631-364c-432d-ac9f-4c6d2a843b83\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") "
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.926881 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc\") pod \"70568631-364c-432d-ac9f-4c6d2a843b83\" (UID: \"70568631-364c-432d-ac9f-4c6d2a843b83\") "
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.937347 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54" (OuterVolumeSpecName: "kube-api-access-xjc54") pod "70568631-364c-432d-ac9f-4c6d2a843b83" (UID: "70568631-364c-432d-ac9f-4c6d2a843b83"). InnerVolumeSpecName "kube-api-access-xjc54". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.976985 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "70568631-364c-432d-ac9f-4c6d2a843b83" (UID: "70568631-364c-432d-ac9f-4c6d2a843b83"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.979131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config" (OuterVolumeSpecName: "config") pod "70568631-364c-432d-ac9f-4c6d2a843b83" (UID: "70568631-364c-432d-ac9f-4c6d2a843b83"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.981175 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "70568631-364c-432d-ac9f-4c6d2a843b83" (UID: "70568631-364c-432d-ac9f-4c6d2a843b83"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:00:06 crc kubenswrapper[5081]: I1003 17:00:06.984748 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "70568631-364c-432d-ac9f-4c6d2a843b83" (UID: "70568631-364c-432d-ac9f-4c6d2a843b83"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.029125 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-config\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.029162 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.029176 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjc54\" (UniqueName: \"kubernetes.io/projected/70568631-364c-432d-ac9f-4c6d2a843b83-kube-api-access-xjc54\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.029187 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.029200 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70568631-364c-432d-ac9f-4c6d2a843b83-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.138336 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"]
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.145532 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f7b6f7cd7-l49t9"]
Oct 03 17:00:07 crc kubenswrapper[5081]: I1003 17:00:07.840277 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" path="/var/lib/kubelet/pods/70568631-364c-432d-ac9f-4c6d2a843b83/volumes"
Oct 03 17:00:10 crc kubenswrapper[5081]: I1003 17:00:10.907869 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:10 crc kubenswrapper[5081]: I1003 17:00:10.908491 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:10 crc kubenswrapper[5081]: I1003 17:00:10.947444 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:11 crc kubenswrapper[5081]: I1003 17:00:11.939425 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:11 crc kubenswrapper[5081]: I1003 17:00:11.984687 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"]
Oct 03 17:00:13 crc kubenswrapper[5081]: I1003 17:00:13.899789 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vhtst" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="registry-server" containerID="cri-o://7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc" gracePeriod=2
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.362842 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.461214 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m268z\" (UniqueName: \"kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z\") pod \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") "
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.462943 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities\") pod \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") "
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.463095 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content\") pod \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\" (UID: \"7491b5a1-3494-4569-a8e6-bf6cd5f89728\") "
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.463760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities" (OuterVolumeSpecName: "utilities") pod "7491b5a1-3494-4569-a8e6-bf6cd5f89728" (UID: "7491b5a1-3494-4569-a8e6-bf6cd5f89728"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.468443 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z" (OuterVolumeSpecName: "kube-api-access-m268z") pod "7491b5a1-3494-4569-a8e6-bf6cd5f89728" (UID: "7491b5a1-3494-4569-a8e6-bf6cd5f89728"). InnerVolumeSpecName "kube-api-access-m268z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.473244 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m268z\" (UniqueName: \"kubernetes.io/projected/7491b5a1-3494-4569-a8e6-bf6cd5f89728-kube-api-access-m268z\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.473502 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.480677 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7491b5a1-3494-4569-a8e6-bf6cd5f89728" (UID: "7491b5a1-3494-4569-a8e6-bf6cd5f89728"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.574957 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7491b5a1-3494-4569-a8e6-bf6cd5f89728-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.916610 5081 generic.go:334] "Generic (PLEG): container finished" podID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerID="7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc" exitCode=0
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.917658 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerDied","Data":"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"}
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.917830 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhtst" event={"ID":"7491b5a1-3494-4569-a8e6-bf6cd5f89728","Type":"ContainerDied","Data":"d1cf2d05d1e1da4f2d027be90a5cdda0919eb1cdf90e1e60fc6db48bb435b13f"}
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.917952 5081 scope.go:117] "RemoveContainer" containerID="7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.918289 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhtst"
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.939305 5081 scope.go:117] "RemoveContainer" containerID="3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f"
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.962875 5081 scope.go:117] "RemoveContainer" containerID="34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df"
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.964920 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"]
Oct 03 17:00:14 crc kubenswrapper[5081]: I1003 17:00:14.979544 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhtst"]
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.000084 5081 scope.go:117] "RemoveContainer" containerID="7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"
Oct 03 17:00:15 crc kubenswrapper[5081]: E1003 17:00:15.000949 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc\": container with ID starting with 7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc not found: ID does not exist" containerID="7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.000981 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc"} err="failed to get container status \"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc\": rpc error: code = NotFound desc = could not find container \"7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc\": container with ID starting with 7df899c4a6147b0286279dfa6a9c4721bc5ba97c5c1c7b83e1abae9c7f0dcddc not found: ID does not exist"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.001002 5081 scope.go:117] "RemoveContainer" containerID="3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f"
Oct 03 17:00:15 crc kubenswrapper[5081]: E1003 17:00:15.001238 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f\": container with ID starting with 3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f not found: ID does not exist" containerID="3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.001261 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f"} err="failed to get container status \"3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f\": rpc error: code = NotFound desc = could not find container \"3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f\": container with ID starting with 3bb3a7de09aa05d78af2cc1c6fd03ca2196e579aeabea794ce94483b7c5d208f not found: ID does not exist"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.001275 5081 scope.go:117] "RemoveContainer" containerID="34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df"
Oct 03 17:00:15 crc kubenswrapper[5081]: E1003 17:00:15.001632 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df\": container with ID starting with 34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df not found: ID does not exist" containerID="34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.001653 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df"} err="failed to get container status \"34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df\": rpc error: code = NotFound desc = could not find container \"34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df\": container with ID starting with 34b63b7942f344a8dd8df7d63495d3f50e3a0b8391a661dcc1a42d605e1fa5df not found: ID does not exist"
Oct 03 17:00:15 crc kubenswrapper[5081]: I1003 17:00:15.837697 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" path="/var/lib/kubelet/pods/7491b5a1-3494-4569-a8e6-bf6cd5f89728/volumes"
Oct 03 17:00:22 crc kubenswrapper[5081]: I1003 17:00:22.855299 5081 scope.go:117] "RemoveContainer" containerID="236b4a712df095b08f2943b8a9693cb096accd6cfe4e3f84820245bd639126fa"
Oct 03 17:00:26 crc kubenswrapper[5081]: I1003 17:00:26.358872 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-66bff68f7c-cm8v6"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.996690 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-k4rrp"]
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997577 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="extract-utilities"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997592 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="extract-utilities"
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997622 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="init"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997628 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="init"
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997636 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="registry-server"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997642 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="registry-server"
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997656 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="dnsmasq-dns"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997662 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="dnsmasq-dns"
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997671 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="extract-content"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997677 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="extract-content"
Oct 03 17:00:33 crc kubenswrapper[5081]: E1003 17:00:33.997690 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fcb380a-2041-4c85-8746-9d5755da5e86" containerName="collect-profiles"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997696 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fcb380a-2041-4c85-8746-9d5755da5e86" containerName="collect-profiles"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997847 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fcb380a-2041-4c85-8746-9d5755da5e86" containerName="collect-profiles"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997865 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="70568631-364c-432d-ac9f-4c6d2a843b83" containerName="dnsmasq-dns"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.997878 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7491b5a1-3494-4569-a8e6-bf6cd5f89728" containerName="registry-server"
Oct 03 17:00:33 crc kubenswrapper[5081]: I1003 17:00:33.998501 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.008349 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k4rrp"]
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.099695 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv6dm\" (UniqueName: \"kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm\") pod \"glance-db-create-k4rrp\" (UID: \"06474819-0ce0-4b8d-9cf9-460b4c452103\") " pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.201608 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv6dm\" (UniqueName: \"kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm\") pod \"glance-db-create-k4rrp\" (UID: \"06474819-0ce0-4b8d-9cf9-460b4c452103\") " pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.219551 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv6dm\" (UniqueName: \"kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm\") pod \"glance-db-create-k4rrp\" (UID: \"06474819-0ce0-4b8d-9cf9-460b4c452103\") " pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.314528 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:34 crc kubenswrapper[5081]: I1003 17:00:34.776681 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k4rrp"]
Oct 03 17:00:34 crc kubenswrapper[5081]: W1003 17:00:34.787774 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06474819_0ce0_4b8d_9cf9_460b4c452103.slice/crio-f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71 WatchSource:0}: Error finding container f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71: Status 404 returned error can't find the container with id f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71
Oct 03 17:00:35 crc kubenswrapper[5081]: I1003 17:00:35.109937 5081 generic.go:334] "Generic (PLEG): container finished" podID="06474819-0ce0-4b8d-9cf9-460b4c452103" containerID="c8af8c6d66b048793ea9902bd1071f10ac1988204d6f3b8d682ff5f014765f07" exitCode=0
Oct 03 17:00:35 crc kubenswrapper[5081]: I1003 17:00:35.109970 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k4rrp" event={"ID":"06474819-0ce0-4b8d-9cf9-460b4c452103","Type":"ContainerDied","Data":"c8af8c6d66b048793ea9902bd1071f10ac1988204d6f3b8d682ff5f014765f07"}
Oct 03 17:00:35 crc kubenswrapper[5081]: I1003 17:00:35.110025 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k4rrp" event={"ID":"06474819-0ce0-4b8d-9cf9-460b4c452103","Type":"ContainerStarted","Data":"f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71"}
Oct 03 17:00:36 crc kubenswrapper[5081]: I1003 17:00:36.470735 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:36 crc kubenswrapper[5081]: I1003 17:00:36.651767 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv6dm\" (UniqueName: \"kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm\") pod \"06474819-0ce0-4b8d-9cf9-460b4c452103\" (UID: \"06474819-0ce0-4b8d-9cf9-460b4c452103\") "
Oct 03 17:00:36 crc kubenswrapper[5081]: I1003 17:00:36.657786 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm" (OuterVolumeSpecName: "kube-api-access-cv6dm") pod "06474819-0ce0-4b8d-9cf9-460b4c452103" (UID: "06474819-0ce0-4b8d-9cf9-460b4c452103"). InnerVolumeSpecName "kube-api-access-cv6dm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:00:36 crc kubenswrapper[5081]: I1003 17:00:36.754011 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv6dm\" (UniqueName: \"kubernetes.io/projected/06474819-0ce0-4b8d-9cf9-460b4c452103-kube-api-access-cv6dm\") on node \"crc\" DevicePath \"\""
Oct 03 17:00:37 crc kubenswrapper[5081]: I1003 17:00:37.124978 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k4rrp" event={"ID":"06474819-0ce0-4b8d-9cf9-460b4c452103","Type":"ContainerDied","Data":"f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71"}
Oct 03 17:00:37 crc kubenswrapper[5081]: I1003 17:00:37.125012 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f05f170dc0ecf9a1585fb22255e77d1cb4ce43f88de3335a8eae5fada126ee71"
Oct 03 17:00:37 crc kubenswrapper[5081]: I1003 17:00:37.125024 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k4rrp"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.017341 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5bb4-account-create-btpfq"]
Oct 03 17:00:44 crc kubenswrapper[5081]: E1003 17:00:44.024328 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06474819-0ce0-4b8d-9cf9-460b4c452103" containerName="mariadb-database-create"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.024363 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="06474819-0ce0-4b8d-9cf9-460b4c452103" containerName="mariadb-database-create"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.024549 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="06474819-0ce0-4b8d-9cf9-460b4c452103" containerName="mariadb-database-create"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.025277 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5bb4-account-create-btpfq"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.040058 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.041667 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5bb4-account-create-btpfq"]
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.176231 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdqvc\" (UniqueName: \"kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc\") pod \"glance-5bb4-account-create-btpfq\" (UID: \"1f46a0c4-ea57-4029-8248-8324ce4bcac3\") " pod="openstack/glance-5bb4-account-create-btpfq"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.277920 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdqvc\" (UniqueName: \"kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc\") pod \"glance-5bb4-account-create-btpfq\" (UID: \"1f46a0c4-ea57-4029-8248-8324ce4bcac3\") " pod="openstack/glance-5bb4-account-create-btpfq"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.298571 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdqvc\" (UniqueName: \"kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc\") pod \"glance-5bb4-account-create-btpfq\" (UID: \"1f46a0c4-ea57-4029-8248-8324ce4bcac3\") " pod="openstack/glance-5bb4-account-create-btpfq"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.362116 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5bb4-account-create-btpfq"
Oct 03 17:00:44 crc kubenswrapper[5081]: I1003 17:00:44.772146 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5bb4-account-create-btpfq"]
Oct 03 17:00:45 crc kubenswrapper[5081]: I1003 17:00:45.184332 5081 generic.go:334] "Generic (PLEG): container finished" podID="1f46a0c4-ea57-4029-8248-8324ce4bcac3" containerID="8f67d77fdc679b919c67c7d1fe8722f85fd52fd300d05b871dbc4651e6eeec65" exitCode=0
Oct 03 17:00:45 crc kubenswrapper[5081]: I1003 17:00:45.184414 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5bb4-account-create-btpfq" event={"ID":"1f46a0c4-ea57-4029-8248-8324ce4bcac3","Type":"ContainerDied","Data":"8f67d77fdc679b919c67c7d1fe8722f85fd52fd300d05b871dbc4651e6eeec65"}
Oct 03 17:00:45 crc kubenswrapper[5081]: I1003 17:00:45.184672 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5bb4-account-create-btpfq" event={"ID":"1f46a0c4-ea57-4029-8248-8324ce4bcac3","Type":"ContainerStarted","Data":"8f1ae468f1af275ef05de3ecc4ec3ab8ccac9d7b2fa3e1d50fbc716a65cb2b53"}
Oct 03 17:00:46 crc kubenswrapper[5081]: I1003 17:00:46.493027 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5bb4-account-create-btpfq" Oct 03 17:00:46 crc kubenswrapper[5081]: I1003 17:00:46.620679 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdqvc\" (UniqueName: \"kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc\") pod \"1f46a0c4-ea57-4029-8248-8324ce4bcac3\" (UID: \"1f46a0c4-ea57-4029-8248-8324ce4bcac3\") " Oct 03 17:00:46 crc kubenswrapper[5081]: I1003 17:00:46.626230 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc" (OuterVolumeSpecName: "kube-api-access-kdqvc") pod "1f46a0c4-ea57-4029-8248-8324ce4bcac3" (UID: "1f46a0c4-ea57-4029-8248-8324ce4bcac3"). InnerVolumeSpecName "kube-api-access-kdqvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:00:46 crc kubenswrapper[5081]: I1003 17:00:46.723030 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdqvc\" (UniqueName: \"kubernetes.io/projected/1f46a0c4-ea57-4029-8248-8324ce4bcac3-kube-api-access-kdqvc\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:47 crc kubenswrapper[5081]: I1003 17:00:47.201788 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5bb4-account-create-btpfq" event={"ID":"1f46a0c4-ea57-4029-8248-8324ce4bcac3","Type":"ContainerDied","Data":"8f1ae468f1af275ef05de3ecc4ec3ab8ccac9d7b2fa3e1d50fbc716a65cb2b53"} Oct 03 17:00:47 crc kubenswrapper[5081]: I1003 17:00:47.201830 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f1ae468f1af275ef05de3ecc4ec3ab8ccac9d7b2fa3e1d50fbc716a65cb2b53" Oct 03 17:00:47 crc kubenswrapper[5081]: I1003 17:00:47.201878 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5bb4-account-create-btpfq" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.179019 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-s9rss"] Oct 03 17:00:49 crc kubenswrapper[5081]: E1003 17:00:49.180504 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f46a0c4-ea57-4029-8248-8324ce4bcac3" containerName="mariadb-account-create" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.180533 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f46a0c4-ea57-4029-8248-8324ce4bcac3" containerName="mariadb-account-create" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.180843 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f46a0c4-ea57-4029-8248-8324ce4bcac3" containerName="mariadb-account-create" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.181850 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.186198 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.186382 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-m2mwj" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.188408 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-s9rss"] Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.264633 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.264751 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.264883 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5mh7\" (UniqueName: \"kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.264953 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.366707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.367125 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.367710 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5mh7\" (UniqueName: \"kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.367739 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data\") pod 
\"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.376148 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.384514 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.385112 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.388424 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5mh7\" (UniqueName: \"kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7\") pod \"glance-db-sync-s9rss\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:49 crc kubenswrapper[5081]: I1003 17:00:49.501901 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:50 crc kubenswrapper[5081]: I1003 17:00:50.085514 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-s9rss"] Oct 03 17:00:50 crc kubenswrapper[5081]: I1003 17:00:50.231435 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-s9rss" event={"ID":"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4","Type":"ContainerStarted","Data":"4c8790eda3d57ff1c1cc9f40f9a5bbd582d84bf6441809db61c7fe4b9c5a6bde"} Oct 03 17:00:51 crc kubenswrapper[5081]: I1003 17:00:51.240903 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-s9rss" event={"ID":"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4","Type":"ContainerStarted","Data":"49a96e0d646de58d1bb70d7c637f4c768f33c49f0a6859725d1e8e499831256d"} Oct 03 17:00:51 crc kubenswrapper[5081]: I1003 17:00:51.263606 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-s9rss" podStartSLOduration=2.2635853089999998 podStartE2EDuration="2.263585309s" podCreationTimestamp="2025-10-03 17:00:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:00:51.257322239 +0000 UTC m=+5570.222878872" watchObservedRunningTime="2025-10-03 17:00:51.263585309 +0000 UTC m=+5570.229141932" Oct 03 17:00:55 crc kubenswrapper[5081]: I1003 17:00:55.272724 5081 generic.go:334] "Generic (PLEG): container finished" podID="2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" containerID="49a96e0d646de58d1bb70d7c637f4c768f33c49f0a6859725d1e8e499831256d" exitCode=0 Oct 03 17:00:55 crc kubenswrapper[5081]: I1003 17:00:55.272815 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-s9rss" 
event={"ID":"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4","Type":"ContainerDied","Data":"49a96e0d646de58d1bb70d7c637f4c768f33c49f0a6859725d1e8e499831256d"} Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.667633 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.815009 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data\") pod \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.815062 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle\") pod \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.815132 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data\") pod \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.815238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5mh7\" (UniqueName: \"kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7\") pod \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\" (UID: \"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4\") " Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.821143 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7" (OuterVolumeSpecName: "kube-api-access-k5mh7") pod "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" (UID: "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4"). InnerVolumeSpecName "kube-api-access-k5mh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.823980 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" (UID: "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.843345 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" (UID: "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.858293 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data" (OuterVolumeSpecName: "config-data") pod "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" (UID: "2ac283c5-027d-4cf6-be4a-8eb9451c2fb4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.920221 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.920480 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.920547 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:56 crc kubenswrapper[5081]: I1003 17:00:56.920628 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5mh7\" (UniqueName: \"kubernetes.io/projected/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4-kube-api-access-k5mh7\") on node \"crc\" DevicePath \"\"" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.289002 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-s9rss" event={"ID":"2ac283c5-027d-4cf6-be4a-8eb9451c2fb4","Type":"ContainerDied","Data":"4c8790eda3d57ff1c1cc9f40f9a5bbd582d84bf6441809db61c7fe4b9c5a6bde"} Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.289041 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c8790eda3d57ff1c1cc9f40f9a5bbd582d84bf6441809db61c7fe4b9c5a6bde" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.289068 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-s9rss" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.587152 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:00:57 crc kubenswrapper[5081]: E1003 17:00:57.587598 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" containerName="glance-db-sync" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.587615 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" containerName="glance-db-sync" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.587815 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" containerName="glance-db-sync" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.588903 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.591445 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.592134 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.592178 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.601099 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-m2mwj" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.602012 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.707648 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"] Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.710678 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.722306 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"] Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736071 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736164 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxfz8\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736391 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736436 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736515 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.736667 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.816358 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.821473 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.824330 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838314 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838380 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838414 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838450 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838484 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxfz8\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8\") pod \"glance-default-external-api-0\" (UID: 
\"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838558 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqv9h\" (UniqueName: \"kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838618 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838661 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838691 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.838711 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.839330 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.839873 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.841056 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.845044 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.846078 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.853280 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.865847 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.868118 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxfz8\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8\") pod \"glance-default-external-api-0\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.906962 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.939921 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940033 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940069 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940097 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940120 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940155 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940176 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqv9h\" (UniqueName: \"kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940271 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sg5x\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x\") pod \"glance-default-internal-api-0\" 
(UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940312 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940378 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.940411 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.941441 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.941908 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.942369 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.942734 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:57 crc kubenswrapper[5081]: I1003 17:00:57.967799 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqv9h\" (UniqueName: \"kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h\") pod \"dnsmasq-dns-5c89d5bb8c-86tfw\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") " pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.039630 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041458 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041517 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sg5x\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041625 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041733 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041765 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041786 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.041805 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.046390 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.053846 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.054048 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.054393 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.055320 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.070965 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sg5x\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.074860 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.147278 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.495731 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.563665 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"] Oct 03 17:00:58 crc kubenswrapper[5081]: I1003 17:00:58.852643 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.207476 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.336805 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerStarted","Data":"f6d807ca81b1b6e762c74ed18faea022bd7ee6905b3e1d41f22befea0fbd55bf"} Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.344165 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerStarted","Data":"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"} Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.344213 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerStarted","Data":"c7b2abcfb18b53811cb9431625a70a02d91cbc460d339c2c2d4762e2f7db4c84"} Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.347454 5081 generic.go:334] "Generic (PLEG): container finished" podID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerID="92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5" exitCode=0 Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.347502 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" event={"ID":"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa","Type":"ContainerDied","Data":"92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5"} Oct 03 17:00:59 crc kubenswrapper[5081]: I1003 17:00:59.347533 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" event={"ID":"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa","Type":"ContainerStarted","Data":"a7b5b1af826e5b45cd3afa18522290917ae6e43cc065424b3b00ddc530918caa"} Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.144454 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29325181-bctjv"] Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.147127 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.160860 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325181-bctjv"] Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.305523 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.305649 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.305726 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2fjc\" (UniqueName: \"kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.305749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.364044 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerStarted","Data":"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc"} Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.366453 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerStarted","Data":"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"} Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.366608 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-log" containerID="cri-o://d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49" gracePeriod=30 Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.366661 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-httpd" containerID="cri-o://a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc" gracePeriod=30 Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.372480 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" event={"ID":"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa","Type":"ContainerStarted","Data":"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122"} Oct 03 17:01:00 crc 
kubenswrapper[5081]: I1003 17:01:00.372948 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.397983 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.397957779 podStartE2EDuration="3.397957779s" podCreationTimestamp="2025-10-03 17:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:00.387260031 +0000 UTC m=+5579.352816664" watchObservedRunningTime="2025-10-03 17:01:00.397957779 +0000 UTC m=+5579.363514402" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.408738 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2fjc\" (UniqueName: \"kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.408822 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.408867 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.408964 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.418069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.435991 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2fjc\" (UniqueName: \"kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.437817 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.453631 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys\") pod \"keystone-cron-29325181-bctjv\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.484910 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.943837 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" podStartSLOduration=3.943814851 podStartE2EDuration="3.943814851s" podCreationTimestamp="2025-10-03 17:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:00.421517896 +0000 UTC m=+5579.387074519" watchObservedRunningTime="2025-10-03 17:01:00.943814851 +0000 UTC m=+5579.909371474" Oct 03 17:01:00 crc kubenswrapper[5081]: I1003 17:01:00.956641 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325181-bctjv"] Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.335234 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.382911 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325181-bctjv" event={"ID":"d4bf20b9-db74-4f02-bc9a-22f16465e199","Type":"ContainerStarted","Data":"1c0a7a83fcb1df3540fea1a0b1ca3be0063a1d323ee9bb73d607357b26a20ff4"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.382948 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325181-bctjv" event={"ID":"d4bf20b9-db74-4f02-bc9a-22f16465e199","Type":"ContainerStarted","Data":"ed2dcf5302eab2c61ca6395484845980b2dcbd50211a76c8c1af04b0de160e09"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.384847 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerStarted","Data":"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386253 5081 generic.go:334] "Generic (PLEG): container finished" podID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerID="a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc" exitCode=0 Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386282 5081 generic.go:334] "Generic (PLEG): container finished" podID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerID="d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49" exitCode=143 Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386522 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386526 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerDied","Data":"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386628 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerDied","Data":"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386655 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4235090d-ac31-4912-ab27-3dee6abeeea3","Type":"ContainerDied","Data":"c7b2abcfb18b53811cb9431625a70a02d91cbc460d339c2c2d4762e2f7db4c84"} Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.386676 5081 scope.go:117] "RemoveContainer" containerID="a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.411366 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29325181-bctjv" podStartSLOduration=1.411345851 podStartE2EDuration="1.411345851s" podCreationTimestamp="2025-10-03 17:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:01.40818063 +0000 UTC m=+5580.373737243" watchObservedRunningTime="2025-10-03 17:01:01.411345851 +0000 UTC m=+5580.376902464" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.425730 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxfz8\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.425782 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.425869 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.426018 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.426164 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.426251 5081 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.426303 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs\") pod \"4235090d-ac31-4912-ab27-3dee6abeeea3\" (UID: \"4235090d-ac31-4912-ab27-3dee6abeeea3\") " Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.427446 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs" (OuterVolumeSpecName: "logs") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.430107 5081 scope.go:117] "RemoveContainer" containerID="d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.432745 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.433094 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph" (OuterVolumeSpecName: "ceph") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.439898 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8" (OuterVolumeSpecName: "kube-api-access-hxfz8") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "kube-api-access-hxfz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.441780 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts" (OuterVolumeSpecName: "scripts") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.488803 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data" (OuterVolumeSpecName: "config-data") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.491791 5081 scope.go:117] "RemoveContainer" containerID="a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"
Oct 03 17:01:01 crc kubenswrapper[5081]: E1003 17:01:01.509742 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc\": container with ID starting with a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc not found: ID does not exist" containerID="a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.509853 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"} err="failed to get container status \"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc\": rpc error: code = NotFound desc = could not find container \"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc\": container with ID starting with a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc not found: ID does not exist"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.509889 5081 scope.go:117] "RemoveContainer" containerID="d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"
Oct 03 17:01:01 crc kubenswrapper[5081]: E1003 17:01:01.524737 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49\": container with ID starting with d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49 not found: ID does not exist" containerID="d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.524797 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"} err="failed to get container status \"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49\": rpc error: code = NotFound desc = could not find container \"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49\": container with ID starting with d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49 not found: ID does not exist"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.524830 5081 scope.go:117] "RemoveContainer" containerID="a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.525379 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc"} err="failed to get container status \"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc\": rpc error: code = NotFound desc = could not find container \"a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc\": container with ID starting with a8d404535e938ba0a2876efe4be23318cd79a1f402677667e784612ad6522cfc not found: ID does not exist"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.525428 5081 scope.go:117] "RemoveContainer" containerID="d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"
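The repeated "RemoveContainer" / "DeleteContainer returned error ... NotFound" pairs above are the kubelet re-requesting deletion of containers CRI-O has already pruned; the NotFound responses make the cleanup idempotent rather than signalling a failure. A rough way to confirm each such error is a repeat for an already-removed ID, under the same file-path assumption as before:

    import re
    from collections import Counter

    # Tally NotFound delete errors per 64-hex container ID; multiple
    # hits for one ID indicate retried, idempotent cleanup of a
    # container the runtime already removed. Path is illustrative.
    PAT = re.compile(r'"DeleteContainer returned error".*?"ID":"([0-9a-f]{64})"')

    hits = Counter()
    with open("kubelet.log", encoding="utf-8") as fh:
        for line in fh:
            for cid in PAT.findall(line):
                hits[cid] += 1

    for cid, n in hits.most_common():
        print(f"{cid[:12]}...  delete-again attempts: {n}")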
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.525743 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49"} err="failed to get container status \"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49\": rpc error: code = NotFound desc = could not find container \"d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49\": container with ID starting with d21cbb2151549bc326303e326782cd0f2a12017ababf8c12a7450ddf46f30a49 not found: ID does not exist"
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528429 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-logs\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528485 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528495 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxfz8\" (UniqueName: \"kubernetes.io/projected/4235090d-ac31-4912-ab27-3dee6abeeea3-kube-api-access-hxfz8\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528503 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528512 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.528520 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4235090d-ac31-4912-ab27-3dee6abeeea3-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.554717 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4235090d-ac31-4912-ab27-3dee6abeeea3" (UID: "4235090d-ac31-4912-ab27-3dee6abeeea3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.630730 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4235090d-ac31-4912-ab27-3dee6abeeea3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.720440 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.720421582 podStartE2EDuration="4.720421582s" podCreationTimestamp="2025-10-03 17:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:01.43806572 +0000 UTC m=+5580.403622343" watchObservedRunningTime="2025-10-03 17:01:01.720421582 +0000 UTC m=+5580.685978195" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.727158 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.741731 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.756412 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:01:01 crc kubenswrapper[5081]: E1003 17:01:01.756824 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-log" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.756845 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-log" Oct 03 17:01:01 crc kubenswrapper[5081]: E1003 17:01:01.756861 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-httpd" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.756867 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-httpd" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.757037 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-httpd" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.760471 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" containerName="glance-log" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.761868 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.769468 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.769800 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834088 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shcfb\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834607 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834642 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834746 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.834845 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.842947 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4235090d-ac31-4912-ab27-3dee6abeeea3" path="/var/lib/kubelet/pods/4235090d-ac31-4912-ab27-3dee6abeeea3/volumes" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936147 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936208 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shcfb\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936251 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936283 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936379 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936480 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.936975 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.940087 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.941617 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.943712 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.943938 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.945209 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.956787 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:01 crc kubenswrapper[5081]: I1003 17:01:01.968950 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shcfb\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb\") pod \"glance-default-external-api-0\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " pod="openstack/glance-default-external-api-0" Oct 03 17:01:02 crc kubenswrapper[5081]: I1003 17:01:02.092856 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:02.694111 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:02.704366 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 03 17:01:03 crc kubenswrapper[5081]: W1003 17:01:02.716536 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a43ee1d_14ae_45e8_a9cf_dc7e8199fccd.slice/crio-315de9083df5fba301c1adaa662e561cfd934cf05345f22e6d3e675b0bba08b9 WatchSource:0}: Error finding container 315de9083df5fba301c1adaa662e561cfd934cf05345f22e6d3e675b0bba08b9: Status 404 returned error can't find the container with id 315de9083df5fba301c1adaa662e561cfd934cf05345f22e6d3e675b0bba08b9
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:03.435650 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerStarted","Data":"acdc661f2e244c0c065a5bc97bdb9db20f5c485759f7b2486ab3cae7b7456a2a"}
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:03.435693 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerStarted","Data":"315de9083df5fba301c1adaa662e561cfd934cf05345f22e6d3e675b0bba08b9"}
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:03.435795 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-log" containerID="cri-o://0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc" gracePeriod=30
Oct 03 17:01:03 crc kubenswrapper[5081]: I1003 17:01:03.435837 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-httpd" containerID="cri-o://09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d" gracePeriod=30
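gracePeriod=30 in the two "Killing container" entries above starts the graceful-termination clock: the runtime delivers SIGTERM and escalates to SIGKILL only if the container outlives the grace period. In the ContainerDied events a little further down, glance-httpd exits 0 (clean shutdown) while glance-log exits 143, i.e. 128 + 15, terminated by SIGTERM before reaching its own exit path. A sketch that pairs each "Killing container" entry with its ContainerDied event to measure the actual shutdown latency (a single-day log and the kubelet.log path are assumptions):

    import re
    from datetime import datetime

    # Pair klog timestamps (e.g. "I1003 17:01:03.435795") on the
    # Killing and ContainerDied entries for the same container ID.
    TS = re.compile(r'[IWE]1003 (\d{2}:\d{2}:\d{2}\.\d{6})')
    KILL = re.compile(r'Killing container with a grace period.*?cri-o://([0-9a-f]{64})')
    DIED = re.compile(r'"Type":"ContainerDied","Data":"([0-9a-f]{64})"')

    def ts(line):
        m = TS.search(line)
        return datetime.strptime(m.group(1), "%H:%M:%S.%f") if m else None

    killed = {}
    with open("kubelet.log", encoding="utf-8") as fh:
        for line in fh:
            for cid in KILL.findall(line):
                killed[cid] = ts(line)
            for cid in DIED.findall(line):
                if cid in killed and ts(line):
                    delta = (ts(line) - killed.pop(cid)).total_seconds()
                    print(f"{cid[:12]}... stopped after {delta:.3f}s")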
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.322671 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393537 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393659 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393708 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393777 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393823 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sg5x\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.393958 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.394002 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts\") pod \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\" (UID: \"4e554166-c3fb-44e7-8a1c-100c83e4fff7\") "
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.394115 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.394267 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs" (OuterVolumeSpecName: "logs") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.394642 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.394667 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e554166-c3fb-44e7-8a1c-100c83e4fff7-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.400343 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph" (OuterVolumeSpecName: "ceph") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.401504 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts" (OuterVolumeSpecName: "scripts") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.401688 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x" (OuterVolumeSpecName: "kube-api-access-2sg5x") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "kube-api-access-2sg5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.420714 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.442864 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data" (OuterVolumeSpecName: "config-data") pod "4e554166-c3fb-44e7-8a1c-100c83e4fff7" (UID: "4e554166-c3fb-44e7-8a1c-100c83e4fff7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.447264 5081 generic.go:334] "Generic (PLEG): container finished" podID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerID="09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d" exitCode=0 Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.447322 5081 generic.go:334] "Generic (PLEG): container finished" podID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerID="0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc" exitCode=143 Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.447351 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.447796 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerDied","Data":"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d"}
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.448265 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerDied","Data":"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc"}
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.448285 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e554166-c3fb-44e7-8a1c-100c83e4fff7","Type":"ContainerDied","Data":"f6d807ca81b1b6e762c74ed18faea022bd7ee6905b3e1d41f22befea0fbd55bf"}
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.448321 5081 scope.go:117] "RemoveContainer" containerID="09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.454119 5081 generic.go:334] "Generic (PLEG): container finished" podID="d4bf20b9-db74-4f02-bc9a-22f16465e199" containerID="1c0a7a83fcb1df3540fea1a0b1ca3be0063a1d323ee9bb73d607357b26a20ff4" exitCode=0
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.454223 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325181-bctjv" event={"ID":"d4bf20b9-db74-4f02-bc9a-22f16465e199","Type":"ContainerDied","Data":"1c0a7a83fcb1df3540fea1a0b1ca3be0063a1d323ee9bb73d607357b26a20ff4"}
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.457864 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerStarted","Data":"628a28388f28676e53095920db723214ae442bb97698a0b3fb656802895ecd19"}
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.503845 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.503818872 podStartE2EDuration="3.503818872s" podCreationTimestamp="2025-10-03 17:01:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:04.490419217 +0000 UTC m=+5583.455975830" watchObservedRunningTime="2025-10-03 17:01:04.503818872 +0000 UTC m=+5583.469375495"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.506584 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.506605 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.506616 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-ceph\") on node \"crc\" DevicePath \"\""
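In the entries that follow, the deleted glance-default-internal-api-0 pod is retired and immediately re-created: SyncLoop DELETE and REMOVE drop the old API object, SyncLoop ADD brings back the same pod name under a fresh UID (4e554166... is replaced by 0cbd888b...), and the CPU and memory managers discard their stale per-container state for the old UID. A sketch mapping each pod name to the UIDs it has carried over the life of the log (path illustrative; the regex targets the escaped "pod \"...\" (UID: \"...\")" form used by the volume entries above):

    import re
    from collections import defaultdict

    # StatefulSet-style pods keep their names across delete/re-create,
    # but every incarnation gets a fresh UID; list names that have had
    # more than one. Path is illustrative.
    PAT = re.compile(r'pod \\"([^\\"]+)\\" \(UID: \\"([0-9a-f-]{36})\\"\)')

    seen = defaultdict(list)
    with open("kubelet.log", encoding="utf-8") as fh:
        for line in fh:
            for name, uid in PAT.findall(line):
                if uid not in seen[name]:
                    seen[name].append(uid)

    for name, uids in seen.items():
        if len(uids) > 1:
            print(name, "->", " -> ".join(u[:8] for u in uids))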
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.506625 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e554166-c3fb-44e7-8a1c-100c83e4fff7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.506635 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sg5x\" (UniqueName: \"kubernetes.io/projected/4e554166-c3fb-44e7-8a1c-100c83e4fff7-kube-api-access-2sg5x\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.542165 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.542341 5081 scope.go:117] "RemoveContainer" containerID="0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.555951 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.567877 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 03 17:01:04 crc kubenswrapper[5081]: E1003 17:01:04.568461 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-httpd"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.568484 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-httpd"
Oct 03 17:01:04 crc kubenswrapper[5081]: E1003 17:01:04.568523 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-log"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.568532 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-log"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.568737 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-log"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.568765 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" containerName="glance-httpd"
Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.570049 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.574003 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.580943 5081 scope.go:117] "RemoveContainer" containerID="09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d" Oct 03 17:01:04 crc kubenswrapper[5081]: E1003 17:01:04.581749 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d\": container with ID starting with 09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d not found: ID does not exist" containerID="09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.581819 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d"} err="failed to get container status \"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d\": rpc error: code = NotFound desc = could not find container \"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d\": container with ID starting with 09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d not found: ID does not exist" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.581849 5081 scope.go:117] "RemoveContainer" containerID="0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc" Oct 03 17:01:04 crc kubenswrapper[5081]: E1003 17:01:04.582416 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc\": container with ID starting with 0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc not found: ID does not exist" containerID="0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.582482 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc"} err="failed to get container status \"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc\": rpc error: code = NotFound desc = could not find container \"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc\": container with ID starting with 0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc not found: ID does not exist" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.582501 5081 scope.go:117] "RemoveContainer" containerID="09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.583246 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d"} err="failed to get container status \"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d\": rpc error: code = NotFound desc = could not find container \"09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d\": container with ID starting with 09bb1cd543a5db95ea85f7638442f9da12d378e04491104b723342b96f32083d not found: ID does not exist" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 
17:01:04.583274 5081 scope.go:117] "RemoveContainer" containerID="0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.584379 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc"} err="failed to get container status \"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc\": rpc error: code = NotFound desc = could not find container \"0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc\": container with ID starting with 0aad06d3288869505a4e3f5c341ac121968dd850d596e9dc5f774ed514e1a6cc not found: ID does not exist" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611221 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfzlr\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611296 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611313 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611339 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611430 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.611530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 
crc kubenswrapper[5081]: I1003 17:01:04.617027 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.712612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.712990 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.713042 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfzlr\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.713079 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.713100 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.713127 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.713223 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.714130 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.715167 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.719141 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.720159 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.720881 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.724295 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.744910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfzlr\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr\") pod \"glance-default-internal-api-0\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:01:04 crc kubenswrapper[5081]: I1003 17:01:04.911100 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.454974 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.840429 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e554166-c3fb-44e7-8a1c-100c83e4fff7" path="/var/lib/kubelet/pods/4e554166-c3fb-44e7-8a1c-100c83e4fff7/volumes" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.862220 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29325181-bctjv" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.935046 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys\") pod \"d4bf20b9-db74-4f02-bc9a-22f16465e199\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.935124 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data\") pod \"d4bf20b9-db74-4f02-bc9a-22f16465e199\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.935222 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2fjc\" (UniqueName: \"kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc\") pod \"d4bf20b9-db74-4f02-bc9a-22f16465e199\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.935258 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle\") pod \"d4bf20b9-db74-4f02-bc9a-22f16465e199\" (UID: \"d4bf20b9-db74-4f02-bc9a-22f16465e199\") " Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.943388 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d4bf20b9-db74-4f02-bc9a-22f16465e199" (UID: "d4bf20b9-db74-4f02-bc9a-22f16465e199"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.943583 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc" (OuterVolumeSpecName: "kube-api-access-f2fjc") pod "d4bf20b9-db74-4f02-bc9a-22f16465e199" (UID: "d4bf20b9-db74-4f02-bc9a-22f16465e199"). InnerVolumeSpecName "kube-api-access-f2fjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.975871 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4bf20b9-db74-4f02-bc9a-22f16465e199" (UID: "d4bf20b9-db74-4f02-bc9a-22f16465e199"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:05 crc kubenswrapper[5081]: I1003 17:01:05.995234 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data" (OuterVolumeSpecName: "config-data") pod "d4bf20b9-db74-4f02-bc9a-22f16465e199" (UID: "d4bf20b9-db74-4f02-bc9a-22f16465e199"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.037583 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.037635 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2fjc\" (UniqueName: \"kubernetes.io/projected/d4bf20b9-db74-4f02-bc9a-22f16465e199-kube-api-access-f2fjc\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.037649 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.037670 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4bf20b9-db74-4f02-bc9a-22f16465e199-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.483729 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerStarted","Data":"1bcca2f2a9bb1086f916a6cb1c1c1701092af01c703518c1fd720876f7f11fde"} Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.484491 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerStarted","Data":"f6ed16d31060087df6805d3431df6e33e3e171002b87e26e8c1618f732a8f961"} Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.486131 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325181-bctjv" event={"ID":"d4bf20b9-db74-4f02-bc9a-22f16465e199","Type":"ContainerDied","Data":"ed2dcf5302eab2c61ca6395484845980b2dcbd50211a76c8c1af04b0de160e09"} Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.486218 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed2dcf5302eab2c61ca6395484845980b2dcbd50211a76c8c1af04b0de160e09" Oct 03 17:01:06 crc kubenswrapper[5081]: I1003 17:01:06.486219 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 17:01:07 crc kubenswrapper[5081]: I1003 17:01:07.497017 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerStarted","Data":"8563384337f9d0433ee70252cdf3eafa306f54b7a3ad9ba320231c6a25526852"}
Oct 03 17:01:07 crc kubenswrapper[5081]: I1003 17:01:07.517936 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.517914629 podStartE2EDuration="3.517914629s" podCreationTimestamp="2025-10-03 17:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:07.5176061 +0000 UTC m=+5586.483162723" watchObservedRunningTime="2025-10-03 17:01:07.517914629 +0000 UTC m=+5586.483471242"
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.040816 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw"
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.093471 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"]
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.093749 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="dnsmasq-dns" containerID="cri-o://5f2ff17e5d38c501d6ffc163fbae0338020c6ccc012deb306f3901dbcae315e4" gracePeriod=10
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.514328 5081 generic.go:334] "Generic (PLEG): container finished" podID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerID="5f2ff17e5d38c501d6ffc163fbae0338020c6ccc012deb306f3901dbcae315e4" exitCode=0
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.514446 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" event={"ID":"71f04f9c-87b6-49ea-bd69-fc055af47341","Type":"ContainerDied","Data":"5f2ff17e5d38c501d6ffc163fbae0338020c6ccc012deb306f3901dbcae315e4"}
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.514900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk" event={"ID":"71f04f9c-87b6-49ea-bd69-fc055af47341","Type":"ContainerDied","Data":"dd86f5e1c05549fb63a00a6bd71b45252ef5a0b426da3b0c34d852ca6588a908"}
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.514921 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd86f5e1c05549fb63a00a6bd71b45252ef5a0b426da3b0c34d852ca6588a908"
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.584680 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk"
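The podStartSLOduration figure above is watchObservedRunningTime minus podCreationTimestamp; since no image pull happened (firstStartedPulling is the zero time), the SLO duration equals the end-to-end duration. A quick check in Go, using the timestamps exactly as logged:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-10-03 17:01:04 +0000 UTC")
	running, _ := time.Parse(layout, "2025-10-03 17:01:07.517914629 +0000 UTC")
	fmt.Println(running.Sub(created).Seconds()) // 3.517914629
}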
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.683709 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb\") pod \"71f04f9c-87b6-49ea-bd69-fc055af47341\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") "
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.683792 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc\") pod \"71f04f9c-87b6-49ea-bd69-fc055af47341\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") "
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.683873 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhrht\" (UniqueName: \"kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht\") pod \"71f04f9c-87b6-49ea-bd69-fc055af47341\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") "
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.683925 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb\") pod \"71f04f9c-87b6-49ea-bd69-fc055af47341\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") "
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.684012 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config\") pod \"71f04f9c-87b6-49ea-bd69-fc055af47341\" (UID: \"71f04f9c-87b6-49ea-bd69-fc055af47341\") "
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.692613 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht" (OuterVolumeSpecName: "kube-api-access-xhrht") pod "71f04f9c-87b6-49ea-bd69-fc055af47341" (UID: "71f04f9c-87b6-49ea-bd69-fc055af47341"). InnerVolumeSpecName "kube-api-access-xhrht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.735426 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "71f04f9c-87b6-49ea-bd69-fc055af47341" (UID: "71f04f9c-87b6-49ea-bd69-fc055af47341"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.739807 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71f04f9c-87b6-49ea-bd69-fc055af47341" (UID: "71f04f9c-87b6-49ea-bd69-fc055af47341"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.740236 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config" (OuterVolumeSpecName: "config") pod "71f04f9c-87b6-49ea-bd69-fc055af47341" (UID: "71f04f9c-87b6-49ea-bd69-fc055af47341"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.752729 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "71f04f9c-87b6-49ea-bd69-fc055af47341" (UID: "71f04f9c-87b6-49ea-bd69-fc055af47341"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.786739 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhrht\" (UniqueName: \"kubernetes.io/projected/71f04f9c-87b6-49ea-bd69-fc055af47341-kube-api-access-xhrht\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.786773 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.786784 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-config\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.786794 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:08 crc kubenswrapper[5081]: I1003 17:01:08.786805 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f04f9c-87b6-49ea-bd69-fc055af47341-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:09 crc kubenswrapper[5081]: I1003 17:01:09.524233 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f68ccf757-t6khk"
Oct 03 17:01:09 crc kubenswrapper[5081]: I1003 17:01:09.563910 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"]
Oct 03 17:01:09 crc kubenswrapper[5081]: I1003 17:01:09.571901 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f68ccf757-t6khk"]
Oct 03 17:01:09 crc kubenswrapper[5081]: I1003 17:01:09.837645 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" path="/var/lib/kubelet/pods/71f04f9c-87b6-49ea-bd69-fc055af47341/volumes"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.094037 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.094421 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.129219 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.136200 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.550374 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:12 crc kubenswrapper[5081]: I1003 17:01:12.550431 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.489104 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.493412 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.911611 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.911986 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.940603 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:14 crc kubenswrapper[5081]: I1003 17:01:14.949184 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:15 crc kubenswrapper[5081]: I1003 17:01:15.580238 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:15 crc kubenswrapper[5081]: I1003 17:01:15.580284 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:17 crc kubenswrapper[5081]: I1003 17:01:17.550371 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 17:01:17 crc kubenswrapper[5081]: I1003 17:01:17.594421 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 17:01:17 crc kubenswrapper[5081]: I1003 17:01:17.629090 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
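The probe transitions above follow the usual pattern: the startup probe reports "unhealthy" until its first success flips it to "started", after which the readiness probe moves from the empty status to "ready". Illustrative probe shapes only; the endpoint path and port are assumptions, not read from the glance pod spec:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Startup probe: reported "unhealthy" until the first success, then "started".
	startup := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{Path: "/healthcheck", Port: intstr.FromInt(9292)},
		},
		PeriodSeconds:    3,
		FailureThreshold: 30, // tolerates several failures while the service boots
	}
	// Readiness probe: status stays "" until it first passes, then "ready".
	readiness := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{Path: "/healthcheck", Port: intstr.FromInt(9292)},
		},
		PeriodSeconds: 3,
	}
	fmt.Println(startup.FailureThreshold, readiness.PeriodSeconds)
}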
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.643961 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-rd4fq"]
Oct 03 17:01:22 crc kubenswrapper[5081]: E1003 17:01:22.646625 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="dnsmasq-dns"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.646707 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="dnsmasq-dns"
Oct 03 17:01:22 crc kubenswrapper[5081]: E1003 17:01:22.646849 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bf20b9-db74-4f02-bc9a-22f16465e199" containerName="keystone-cron"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.646915 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bf20b9-db74-4f02-bc9a-22f16465e199" containerName="keystone-cron"
Oct 03 17:01:22 crc kubenswrapper[5081]: E1003 17:01:22.647009 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="init"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.647067 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="init"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.647302 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bf20b9-db74-4f02-bc9a-22f16465e199" containerName="keystone-cron"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.647360 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f04f9c-87b6-49ea-bd69-fc055af47341" containerName="dnsmasq-dns"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.648391 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.669182 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-rd4fq"]
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.743251 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh5kt\" (UniqueName: \"kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt\") pod \"placement-db-create-rd4fq\" (UID: \"1df51ea5-3d03-4c89-82e9-10bc1b10b35c\") " pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.845162 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh5kt\" (UniqueName: \"kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt\") pod \"placement-db-create-rd4fq\" (UID: \"1df51ea5-3d03-4c89-82e9-10bc1b10b35c\") " pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.863684 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh5kt\" (UniqueName: \"kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt\") pod \"placement-db-create-rd4fq\" (UID: \"1df51ea5-3d03-4c89-82e9-10bc1b10b35c\") " pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.962110 5081 scope.go:117] "RemoveContainer" containerID="96fbb2c020509860763b0f28e3fed45f8510de4a0bcb71a092aac3bad6d6e217"
Oct 03 17:01:22 crc kubenswrapper[5081]: I1003 17:01:22.969757 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:23 crc kubenswrapper[5081]: I1003 17:01:23.433868 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-rd4fq"]
Oct 03 17:01:23 crc kubenswrapper[5081]: W1003 17:01:23.436091 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1df51ea5_3d03_4c89_82e9_10bc1b10b35c.slice/crio-042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452 WatchSource:0}: Error finding container 042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452: Status 404 returned error can't find the container with id 042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452
Oct 03 17:01:23 crc kubenswrapper[5081]: I1003 17:01:23.653525 5081 generic.go:334] "Generic (PLEG): container finished" podID="1df51ea5-3d03-4c89-82e9-10bc1b10b35c" containerID="1e2de4cb5973b3fcff5d7e4adcc71758676813de4ee67f9ef023419d234bd94f" exitCode=0
Oct 03 17:01:23 crc kubenswrapper[5081]: I1003 17:01:23.653580 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rd4fq" event={"ID":"1df51ea5-3d03-4c89-82e9-10bc1b10b35c","Type":"ContainerDied","Data":"1e2de4cb5973b3fcff5d7e4adcc71758676813de4ee67f9ef023419d234bd94f"}
Oct 03 17:01:23 crc kubenswrapper[5081]: I1003 17:01:23.653835 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rd4fq" event={"ID":"1df51ea5-3d03-4c89-82e9-10bc1b10b35c","Type":"ContainerStarted","Data":"042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452"}
Oct 03 17:01:24 crc kubenswrapper[5081]: I1003 17:01:24.974800 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.084337 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh5kt\" (UniqueName: \"kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt\") pod \"1df51ea5-3d03-4c89-82e9-10bc1b10b35c\" (UID: \"1df51ea5-3d03-4c89-82e9-10bc1b10b35c\") "
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.094280 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt" (OuterVolumeSpecName: "kube-api-access-lh5kt") pod "1df51ea5-3d03-4c89-82e9-10bc1b10b35c" (UID: "1df51ea5-3d03-4c89-82e9-10bc1b10b35c"). InnerVolumeSpecName "kube-api-access-lh5kt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.187061 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh5kt\" (UniqueName: \"kubernetes.io/projected/1df51ea5-3d03-4c89-82e9-10bc1b10b35c-kube-api-access-lh5kt\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.671609 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rd4fq" event={"ID":"1df51ea5-3d03-4c89-82e9-10bc1b10b35c","Type":"ContainerDied","Data":"042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452"}
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.671881 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="042530944b9e3eb066dfdf4d803c559d5712f0680c83213834d1c1d2be0b3452"
Oct 03 17:01:25 crc kubenswrapper[5081]: I1003 17:01:25.671675 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rd4fq"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.653823 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-fbf2-account-create-z77t8"]
Oct 03 17:01:32 crc kubenswrapper[5081]: E1003 17:01:32.654937 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df51ea5-3d03-4c89-82e9-10bc1b10b35c" containerName="mariadb-database-create"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.654955 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df51ea5-3d03-4c89-82e9-10bc1b10b35c" containerName="mariadb-database-create"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.655210 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df51ea5-3d03-4c89-82e9-10bc1b10b35c" containerName="mariadb-database-create"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.656025 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.657915 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.665776 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fbf2-account-create-z77t8"]
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.814678 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qg9q\" (UniqueName: \"kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q\") pod \"placement-fbf2-account-create-z77t8\" (UID: \"07fb0241-0219-4ade-a23a-18ea596692a1\") " pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.916677 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qg9q\" (UniqueName: \"kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q\") pod \"placement-fbf2-account-create-z77t8\" (UID: \"07fb0241-0219-4ade-a23a-18ea596692a1\") " pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.935865 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qg9q\" (UniqueName: \"kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q\") pod \"placement-fbf2-account-create-z77t8\" (UID: \"07fb0241-0219-4ade-a23a-18ea596692a1\") " pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:32 crc kubenswrapper[5081]: I1003 17:01:32.978546 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:33 crc kubenswrapper[5081]: I1003 17:01:33.400454 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-fbf2-account-create-z77t8"]
Oct 03 17:01:33 crc kubenswrapper[5081]: I1003 17:01:33.728610 5081 generic.go:334] "Generic (PLEG): container finished" podID="07fb0241-0219-4ade-a23a-18ea596692a1" containerID="d672d848edbf3802fc589777f88d24286984a46b56a1fe5f9a6930b1934fb984" exitCode=0
Oct 03 17:01:33 crc kubenswrapper[5081]: I1003 17:01:33.728871 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fbf2-account-create-z77t8" event={"ID":"07fb0241-0219-4ade-a23a-18ea596692a1","Type":"ContainerDied","Data":"d672d848edbf3802fc589777f88d24286984a46b56a1fe5f9a6930b1934fb984"}
Oct 03 17:01:33 crc kubenswrapper[5081]: I1003 17:01:33.730601 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fbf2-account-create-z77t8" event={"ID":"07fb0241-0219-4ade-a23a-18ea596692a1","Type":"ContainerStarted","Data":"51274643a20f214d9d69d793e8df6a50ff5a12cb6ec3e82c126aab63a4793499"}
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.031202 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.151412 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qg9q\" (UniqueName: \"kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q\") pod \"07fb0241-0219-4ade-a23a-18ea596692a1\" (UID: \"07fb0241-0219-4ade-a23a-18ea596692a1\") "
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.156610 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q" (OuterVolumeSpecName: "kube-api-access-6qg9q") pod "07fb0241-0219-4ade-a23a-18ea596692a1" (UID: "07fb0241-0219-4ade-a23a-18ea596692a1"). InnerVolumeSpecName "kube-api-access-6qg9q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.254049 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qg9q\" (UniqueName: \"kubernetes.io/projected/07fb0241-0219-4ade-a23a-18ea596692a1-kube-api-access-6qg9q\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.746966 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-fbf2-account-create-z77t8" event={"ID":"07fb0241-0219-4ade-a23a-18ea596692a1","Type":"ContainerDied","Data":"51274643a20f214d9d69d793e8df6a50ff5a12cb6ec3e82c126aab63a4793499"}
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.747283 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51274643a20f214d9d69d793e8df6a50ff5a12cb6ec3e82c126aab63a4793499"
Oct 03 17:01:35 crc kubenswrapper[5081]: I1003 17:01:35.747018 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-fbf2-account-create-z77t8"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.894549 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"]
Oct 03 17:01:37 crc kubenswrapper[5081]: E1003 17:01:37.894964 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07fb0241-0219-4ade-a23a-18ea596692a1" containerName="mariadb-account-create"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.894981 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="07fb0241-0219-4ade-a23a-18ea596692a1" containerName="mariadb-account-create"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.895182 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="07fb0241-0219-4ade-a23a-18ea596692a1" containerName="mariadb-account-create"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.896143 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.921924 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"]
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.931981 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jm9jp"]
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.933084 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.935294 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.938394 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-87n54"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.938441 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 03 17:01:37 crc kubenswrapper[5081]: I1003 17:01:37.945764 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jm9jp"]
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.026517 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.026889 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.026930 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddpdv\" (UniqueName: \"kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.026975 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.026995 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.027052 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.027121 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.027200 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swz2z\" (UniqueName: \"kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.027225 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.027287 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129134 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129222 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddpdv\" (UniqueName: \"kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129280 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129309 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129339 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129362 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129448 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swz2z\" (UniqueName: \"kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129499 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129541 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.129577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.130263 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.130650 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.131377 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.132154 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.132170 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.143701 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.146069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.149118 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.156313 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddpdv\" (UniqueName: \"kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv\") pod \"dnsmasq-dns-59d454cf57-bfrxw\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.157307 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swz2z\" (UniqueName: \"kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z\") pod \"placement-db-sync-jm9jp\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") " pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.214361 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.262878 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.665412 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"]
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.772772 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jm9jp"]
Oct 03 17:01:38 crc kubenswrapper[5081]: I1003 17:01:38.780617 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" event={"ID":"eb56f1bb-3606-4cfc-b803-07e89f3dffaf","Type":"ContainerStarted","Data":"7863f7a20aa1d4dd258d5c5ac6a18502bd0778d1f078a9264c0b8afd72407ef7"}
Oct 03 17:01:39 crc kubenswrapper[5081]: I1003 17:01:39.791170 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerID="3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6" exitCode=0
Oct 03 17:01:39 crc kubenswrapper[5081]: I1003 17:01:39.791294 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" event={"ID":"eb56f1bb-3606-4cfc-b803-07e89f3dffaf","Type":"ContainerDied","Data":"3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6"}
Oct 03 17:01:39 crc kubenswrapper[5081]: I1003 17:01:39.793546 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jm9jp" event={"ID":"2a89a4aa-b326-4925-a274-510acf1200e9","Type":"ContainerStarted","Data":"6399490e5383323e3e22946240082d352ebff157f1442d1598a6ea16677f4252"}
Oct 03 17:01:39 crc kubenswrapper[5081]: I1003 17:01:39.793592 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jm9jp" event={"ID":"2a89a4aa-b326-4925-a274-510acf1200e9","Type":"ContainerStarted","Data":"8db08fa0daae7c2a12d533f0f40876c087ce6a969eca751862ae3410c33d6b5d"}
Oct 03 17:01:39 crc kubenswrapper[5081]: I1003 17:01:39.854645 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jm9jp" podStartSLOduration=2.85453753 podStartE2EDuration="2.85453753s" podCreationTimestamp="2025-10-03 17:01:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:39.847857428 +0000 UTC m=+5618.813414051" watchObservedRunningTime="2025-10-03 17:01:39.85453753 +0000 UTC m=+5618.820094143"
Oct 03 17:01:40 crc kubenswrapper[5081]: I1003 17:01:40.802516 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" event={"ID":"eb56f1bb-3606-4cfc-b803-07e89f3dffaf","Type":"ContainerStarted","Data":"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099"}
Oct 03 17:01:40 crc kubenswrapper[5081]: I1003 17:01:40.803148 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:40 crc kubenswrapper[5081]: I1003 17:01:40.805074 5081 generic.go:334] "Generic (PLEG): container finished" podID="2a89a4aa-b326-4925-a274-510acf1200e9" containerID="6399490e5383323e3e22946240082d352ebff157f1442d1598a6ea16677f4252" exitCode=0
Oct 03 17:01:40 crc kubenswrapper[5081]: I1003 17:01:40.805108 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jm9jp" event={"ID":"2a89a4aa-b326-4925-a274-510acf1200e9","Type":"ContainerDied","Data":"6399490e5383323e3e22946240082d352ebff157f1442d1598a6ea16677f4252"}
Oct 03 17:01:40 crc kubenswrapper[5081]: I1003 17:01:40.825783 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" podStartSLOduration=3.825764109 podStartE2EDuration="3.825764109s" podCreationTimestamp="2025-10-03 17:01:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:40.818293495 +0000 UTC m=+5619.783850118" watchObservedRunningTime="2025-10-03 17:01:40.825764109 +0000 UTC m=+5619.791320722"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.254676 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.406490 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data\") pod \"2a89a4aa-b326-4925-a274-510acf1200e9\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") "
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.406544 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts\") pod \"2a89a4aa-b326-4925-a274-510acf1200e9\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") "
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.406603 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle\") pod \"2a89a4aa-b326-4925-a274-510acf1200e9\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") "
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.406658 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs\") pod \"2a89a4aa-b326-4925-a274-510acf1200e9\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") "
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.406688 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swz2z\" (UniqueName: \"kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z\") pod \"2a89a4aa-b326-4925-a274-510acf1200e9\" (UID: \"2a89a4aa-b326-4925-a274-510acf1200e9\") "
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.407730 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs" (OuterVolumeSpecName: "logs") pod "2a89a4aa-b326-4925-a274-510acf1200e9" (UID: "2a89a4aa-b326-4925-a274-510acf1200e9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.422055 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts" (OuterVolumeSpecName: "scripts") pod "2a89a4aa-b326-4925-a274-510acf1200e9" (UID: "2a89a4aa-b326-4925-a274-510acf1200e9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.422202 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z" (OuterVolumeSpecName: "kube-api-access-swz2z") pod "2a89a4aa-b326-4925-a274-510acf1200e9" (UID: "2a89a4aa-b326-4925-a274-510acf1200e9"). InnerVolumeSpecName "kube-api-access-swz2z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.434812 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data" (OuterVolumeSpecName: "config-data") pod "2a89a4aa-b326-4925-a274-510acf1200e9" (UID: "2a89a4aa-b326-4925-a274-510acf1200e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.437289 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a89a4aa-b326-4925-a274-510acf1200e9" (UID: "2a89a4aa-b326-4925-a274-510acf1200e9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.509037 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.509066 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.509074 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a89a4aa-b326-4925-a274-510acf1200e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.509083 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a89a4aa-b326-4925-a274-510acf1200e9-logs\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.509691 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swz2z\" (UniqueName: \"kubernetes.io/projected/2a89a4aa-b326-4925-a274-510acf1200e9-kube-api-access-swz2z\") on node \"crc\" DevicePath \"\""
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.823880 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jm9jp" event={"ID":"2a89a4aa-b326-4925-a274-510acf1200e9","Type":"ContainerDied","Data":"8db08fa0daae7c2a12d533f0f40876c087ce6a969eca751862ae3410c33d6b5d"}
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.823939 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8db08fa0daae7c2a12d533f0f40876c087ce6a969eca751862ae3410c33d6b5d"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.823951 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jm9jp"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.915982 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6b65dbcc9d-xtvgl"]
Oct 03 17:01:42 crc kubenswrapper[5081]: E1003 17:01:42.916778 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a89a4aa-b326-4925-a274-510acf1200e9" containerName="placement-db-sync"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.916798 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a89a4aa-b326-4925-a274-510acf1200e9" containerName="placement-db-sync"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.917047 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a89a4aa-b326-4925-a274-510acf1200e9" containerName="placement-db-sync"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.918030 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.922049 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.922068 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-87n54"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.924023 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 03 17:01:42 crc kubenswrapper[5081]: I1003 17:01:42.927493 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b65dbcc9d-xtvgl"]
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.017837 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-config-data\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.017878 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-scripts\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.017916 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-combined-ca-bundle\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.017949 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-logs\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.018048 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hblmz\" (UniqueName: \"kubernetes.io/projected/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-kube-api-access-hblmz\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.119996 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-logs\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.120068 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hblmz\" (UniqueName: \"kubernetes.io/projected/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-kube-api-access-hblmz\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.120237 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-config-data\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.120271 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-scripts\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.120315 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-combined-ca-bundle\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.120659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-logs\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.124490 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-combined-ca-bundle\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.124828 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-config-data\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.131774 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-scripts\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.134910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hblmz\" (UniqueName: \"kubernetes.io/projected/ad02f846-e187-4bdd-b2a0-0ba53fc9b6af-kube-api-access-hblmz\") pod \"placement-6b65dbcc9d-xtvgl\" (UID: \"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af\") " pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.235752 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.655361 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b65dbcc9d-xtvgl"]
Oct 03 17:01:43 crc kubenswrapper[5081]: W1003 17:01:43.664145 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad02f846_e187_4bdd_b2a0_0ba53fc9b6af.slice/crio-c94371dd2b14c87ec8fc4a52ccf742c0fb9a0d26a0de3a63c0d1b7b522b29dc3 WatchSource:0}: Error finding container c94371dd2b14c87ec8fc4a52ccf742c0fb9a0d26a0de3a63c0d1b7b522b29dc3: Status 404 returned error can't find the container with id c94371dd2b14c87ec8fc4a52ccf742c0fb9a0d26a0de3a63c0d1b7b522b29dc3
Oct 03 17:01:43 crc kubenswrapper[5081]: I1003 17:01:43.853965 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b65dbcc9d-xtvgl" event={"ID":"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af","Type":"ContainerStarted","Data":"c94371dd2b14c87ec8fc4a52ccf742c0fb9a0d26a0de3a63c0d1b7b522b29dc3"}
Oct 03 17:01:44 crc kubenswrapper[5081]: I1003 17:01:44.855704 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b65dbcc9d-xtvgl" event={"ID":"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af","Type":"ContainerStarted","Data":"7e482b28298084b8e6d07c5324b0dc0fa306ec047b7151820ad02153189dfbbc"}
Oct 03 17:01:44 crc kubenswrapper[5081]: I1003 17:01:44.856029 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b65dbcc9d-xtvgl" event={"ID":"ad02f846-e187-4bdd-b2a0-0ba53fc9b6af","Type":"ContainerStarted","Data":"dfac46336c69b6058c5b72ac7a31637b2ce00eece573ea7f6e5340c6d63bd588"}
Oct 03 17:01:44 crc kubenswrapper[5081]: I1003 17:01:44.856047 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:44 crc kubenswrapper[5081]: I1003 17:01:44.856221 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b65dbcc9d-xtvgl"
Oct 03 17:01:44 crc kubenswrapper[5081]: I1003 17:01:44.874055 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6b65dbcc9d-xtvgl" podStartSLOduration=2.874037876 podStartE2EDuration="2.874037876s" podCreationTimestamp="2025-10-03 17:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:01:44.870370491 +0000 UTC m=+5623.835927104" watchObservedRunningTime="2025-10-03 17:01:44.874037876 +0000 UTC m=+5623.839594489"
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.216355 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw"
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.278034 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"]
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.278639 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="dnsmasq-dns" containerID="cri-o://813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122" gracePeriod=10
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.732084 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw"
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.830298 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb\") pod \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") "
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.830424 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqv9h\" (UniqueName: \"kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h\") pod \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") "
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.830458 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc\") pod \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") "
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.830525 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config\") pod \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") "
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.830610 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb\") pod \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\" (UID: \"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa\") "
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.848727 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h" (OuterVolumeSpecName: "kube-api-access-sqv9h") pod "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" (UID: "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa"). InnerVolumeSpecName "kube-api-access-sqv9h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.879368 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" (UID: "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.892429 5081 generic.go:334] "Generic (PLEG): container finished" podID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerID="813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122" exitCode=0
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.892495 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw"
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.892495 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" event={"ID":"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa","Type":"ContainerDied","Data":"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122"}
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.892647 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c89d5bb8c-86tfw" event={"ID":"37a53dd0-cc23-441f-adb2-2f69c2e2c5aa","Type":"ContainerDied","Data":"a7b5b1af826e5b45cd3afa18522290917ae6e43cc065424b3b00ddc530918caa"}
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.892691 5081 scope.go:117] "RemoveContainer" containerID="813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122"
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.897627 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config" (OuterVolumeSpecName: "config") pod "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" (UID: "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.898695 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" (UID: "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.899381 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" (UID: "37a53dd0-cc23-441f-adb2-2f69c2e2c5aa"). InnerVolumeSpecName "dns-svc".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.935743 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.935778 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqv9h\" (UniqueName: \"kubernetes.io/projected/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-kube-api-access-sqv9h\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.935802 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.935815 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.935825 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.957001 5081 scope.go:117] "RemoveContainer" containerID="92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.973352 5081 scope.go:117] "RemoveContainer" containerID="813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122" Oct 03 17:01:48 crc kubenswrapper[5081]: E1003 17:01:48.973792 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122\": container with ID starting with 813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122 not found: ID does not exist" containerID="813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.973830 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122"} err="failed to get container status \"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122\": rpc error: code = NotFound desc = could not find container \"813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122\": container with ID starting with 813b9caf177373ca74e71e9d87092a272c6ffdd97823507163ea16aebbf08122 not found: ID does not exist" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.973858 5081 scope.go:117] "RemoveContainer" containerID="92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5" Oct 03 17:01:48 crc kubenswrapper[5081]: E1003 17:01:48.974293 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5\": container with ID starting with 92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5 not found: ID does not exist" containerID="92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5" Oct 03 17:01:48 crc kubenswrapper[5081]: I1003 17:01:48.974322 5081 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5"} err="failed to get container status \"92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5\": rpc error: code = NotFound desc = could not find container \"92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5\": container with ID starting with 92677e7c63a633fe4c0113972e80e9a929537ff0541aa4f4160b15beb1b788b5 not found: ID does not exist" Oct 03 17:01:49 crc kubenswrapper[5081]: I1003 17:01:49.238379 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"] Oct 03 17:01:49 crc kubenswrapper[5081]: I1003 17:01:49.252237 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c89d5bb8c-86tfw"] Oct 03 17:01:49 crc kubenswrapper[5081]: I1003 17:01:49.837984 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" path="/var/lib/kubelet/pods/37a53dd0-cc23-441f-adb2-2f69c2e2c5aa/volumes" Oct 03 17:02:00 crc kubenswrapper[5081]: I1003 17:02:00.647517 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:02:00 crc kubenswrapper[5081]: I1003 17:02:00.648347 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:02:14 crc kubenswrapper[5081]: I1003 17:02:14.224737 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b65dbcc9d-xtvgl" Oct 03 17:02:15 crc kubenswrapper[5081]: I1003 17:02:15.227357 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b65dbcc9d-xtvgl" Oct 03 17:02:23 crc kubenswrapper[5081]: I1003 17:02:23.055108 5081 scope.go:117] "RemoveContainer" containerID="3455b04993baa2a82bfe3cdfc8e9f0c9f0688fa565074db5e3207752c834f7a8" Oct 03 17:02:30 crc kubenswrapper[5081]: I1003 17:02:30.647971 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:02:30 crc kubenswrapper[5081]: I1003 17:02:30.648530 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.231072 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wxmmr"] Oct 03 17:02:35 crc kubenswrapper[5081]: E1003 17:02:35.231990 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="dnsmasq-dns" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.232004 5081 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="dnsmasq-dns" Oct 03 17:02:35 crc kubenswrapper[5081]: E1003 17:02:35.232039 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="init" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.232046 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="init" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.232227 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="37a53dd0-cc23-441f-adb2-2f69c2e2c5aa" containerName="dnsmasq-dns" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.232976 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.241833 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wxmmr"] Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.350631 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcmt5\" (UniqueName: \"kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5\") pod \"nova-api-db-create-wxmmr\" (UID: \"83c32c48-b8b0-418a-ac7a-e335d0564245\") " pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.428042 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-6f2fx"] Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.429693 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.439335 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-6f2fx"] Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.452797 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcmt5\" (UniqueName: \"kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5\") pod \"nova-api-db-create-wxmmr\" (UID: \"83c32c48-b8b0-418a-ac7a-e335d0564245\") " pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.486809 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcmt5\" (UniqueName: \"kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5\") pod \"nova-api-db-create-wxmmr\" (UID: \"83c32c48-b8b0-418a-ac7a-e335d0564245\") " pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.497652 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-wxcsk"] Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.498924 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.523290 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-wxcsk"] Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.553847 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.555133 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nk86\" (UniqueName: \"kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86\") pod \"nova-cell0-db-create-6f2fx\" (UID: \"14f17125-3b49-4560-aaab-3c9f0218d7a5\") " pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.555185 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkwf4\" (UniqueName: \"kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4\") pod \"nova-cell1-db-create-wxcsk\" (UID: \"1cdc9f9a-f530-454b-a477-cfdce21cf6b8\") " pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.656927 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nk86\" (UniqueName: \"kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86\") pod \"nova-cell0-db-create-6f2fx\" (UID: \"14f17125-3b49-4560-aaab-3c9f0218d7a5\") " pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.657498 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkwf4\" (UniqueName: \"kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4\") pod \"nova-cell1-db-create-wxcsk\" (UID: \"1cdc9f9a-f530-454b-a477-cfdce21cf6b8\") " pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.677601 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nk86\" (UniqueName: \"kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86\") pod \"nova-cell0-db-create-6f2fx\" (UID: \"14f17125-3b49-4560-aaab-3c9f0218d7a5\") " pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.678824 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkwf4\" (UniqueName: \"kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4\") pod \"nova-cell1-db-create-wxcsk\" (UID: \"1cdc9f9a-f530-454b-a477-cfdce21cf6b8\") " pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.748385 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:35 crc kubenswrapper[5081]: I1003 17:02:35.854602 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.003430 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wxmmr"] Oct 03 17:02:36 crc kubenswrapper[5081]: W1003 17:02:36.019966 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83c32c48_b8b0_418a_ac7a_e335d0564245.slice/crio-7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2 WatchSource:0}: Error finding container 7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2: Status 404 returned error can't find the container with id 7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2 Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.171232 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-6f2fx"] Oct 03 17:02:36 crc kubenswrapper[5081]: W1003 17:02:36.181039 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f17125_3b49_4560_aaab_3c9f0218d7a5.slice/crio-b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3 WatchSource:0}: Error finding container b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3: Status 404 returned error can't find the container with id b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3 Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.279121 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-wxcsk"] Oct 03 17:02:36 crc kubenswrapper[5081]: W1003 17:02:36.290198 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cdc9f9a_f530_454b_a477_cfdce21cf6b8.slice/crio-66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de WatchSource:0}: Error finding container 66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de: Status 404 returned error can't find the container with id 66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.290487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wxmmr" event={"ID":"83c32c48-b8b0-418a-ac7a-e335d0564245","Type":"ContainerStarted","Data":"bc980de3d642c634828274f02124571b8a21dabd9bb7c5aeb8ca18fc4d50b7ef"} Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.290527 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wxmmr" event={"ID":"83c32c48-b8b0-418a-ac7a-e335d0564245","Type":"ContainerStarted","Data":"7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2"} Oct 03 17:02:36 crc kubenswrapper[5081]: I1003 17:02:36.291769 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6f2fx" event={"ID":"14f17125-3b49-4560-aaab-3c9f0218d7a5","Type":"ContainerStarted","Data":"b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3"} Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.301899 5081 generic.go:334] "Generic (PLEG): container finished" podID="83c32c48-b8b0-418a-ac7a-e335d0564245" containerID="bc980de3d642c634828274f02124571b8a21dabd9bb7c5aeb8ca18fc4d50b7ef" exitCode=0 Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.301988 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wxmmr" 
event={"ID":"83c32c48-b8b0-418a-ac7a-e335d0564245","Type":"ContainerDied","Data":"bc980de3d642c634828274f02124571b8a21dabd9bb7c5aeb8ca18fc4d50b7ef"} Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.305072 5081 generic.go:334] "Generic (PLEG): container finished" podID="1cdc9f9a-f530-454b-a477-cfdce21cf6b8" containerID="16a94ee498ae34e04f01b07a81641612b16b270ae4b5d071a0eb4b8de678cc5d" exitCode=0 Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.305128 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-wxcsk" event={"ID":"1cdc9f9a-f530-454b-a477-cfdce21cf6b8","Type":"ContainerDied","Data":"16a94ee498ae34e04f01b07a81641612b16b270ae4b5d071a0eb4b8de678cc5d"} Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.305282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-wxcsk" event={"ID":"1cdc9f9a-f530-454b-a477-cfdce21cf6b8","Type":"ContainerStarted","Data":"66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de"} Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.307190 5081 generic.go:334] "Generic (PLEG): container finished" podID="14f17125-3b49-4560-aaab-3c9f0218d7a5" containerID="cdeb5b6f69169e55ef27eec62dcc5401a32ebdee0235b33924d9cb0f8742cdff" exitCode=0 Oct 03 17:02:37 crc kubenswrapper[5081]: I1003 17:02:37.307297 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6f2fx" event={"ID":"14f17125-3b49-4560-aaab-3c9f0218d7a5","Type":"ContainerDied","Data":"cdeb5b6f69169e55ef27eec62dcc5401a32ebdee0235b33924d9cb0f8742cdff"} Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.630067 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.715014 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.723453 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.723879 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcmt5\" (UniqueName: \"kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5\") pod \"83c32c48-b8b0-418a-ac7a-e335d0564245\" (UID: \"83c32c48-b8b0-418a-ac7a-e335d0564245\") " Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.734100 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5" (OuterVolumeSpecName: "kube-api-access-pcmt5") pod "83c32c48-b8b0-418a-ac7a-e335d0564245" (UID: "83c32c48-b8b0-418a-ac7a-e335d0564245"). InnerVolumeSpecName "kube-api-access-pcmt5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.827572 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nk86\" (UniqueName: \"kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86\") pod \"14f17125-3b49-4560-aaab-3c9f0218d7a5\" (UID: \"14f17125-3b49-4560-aaab-3c9f0218d7a5\") " Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.828200 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkwf4\" (UniqueName: \"kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4\") pod \"1cdc9f9a-f530-454b-a477-cfdce21cf6b8\" (UID: \"1cdc9f9a-f530-454b-a477-cfdce21cf6b8\") " Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.829037 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcmt5\" (UniqueName: \"kubernetes.io/projected/83c32c48-b8b0-418a-ac7a-e335d0564245-kube-api-access-pcmt5\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.830975 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86" (OuterVolumeSpecName: "kube-api-access-7nk86") pod "14f17125-3b49-4560-aaab-3c9f0218d7a5" (UID: "14f17125-3b49-4560-aaab-3c9f0218d7a5"). InnerVolumeSpecName "kube-api-access-7nk86". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.831184 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4" (OuterVolumeSpecName: "kube-api-access-lkwf4") pod "1cdc9f9a-f530-454b-a477-cfdce21cf6b8" (UID: "1cdc9f9a-f530-454b-a477-cfdce21cf6b8"). InnerVolumeSpecName "kube-api-access-lkwf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.930863 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkwf4\" (UniqueName: \"kubernetes.io/projected/1cdc9f9a-f530-454b-a477-cfdce21cf6b8-kube-api-access-lkwf4\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:38 crc kubenswrapper[5081]: I1003 17:02:38.930902 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nk86\" (UniqueName: \"kubernetes.io/projected/14f17125-3b49-4560-aaab-3c9f0218d7a5-kube-api-access-7nk86\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.324004 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-wxcsk" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.323991 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-wxcsk" event={"ID":"1cdc9f9a-f530-454b-a477-cfdce21cf6b8","Type":"ContainerDied","Data":"66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de"} Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.324125 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66379f1abdd0b804c3fea36d3c3c5c5fbf221eb0508fcdcbdceac16417ca89de" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.327232 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6f2fx" event={"ID":"14f17125-3b49-4560-aaab-3c9f0218d7a5","Type":"ContainerDied","Data":"b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3"} Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.327266 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6f2fx" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.327283 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b296632edb8eeb24e81c4d6380b832cf1bb7b29a52081fe6bc83be7aeb7a8bb3" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.329269 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wxmmr" event={"ID":"83c32c48-b8b0-418a-ac7a-e335d0564245","Type":"ContainerDied","Data":"7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2"} Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.329317 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wxmmr" Oct 03 17:02:39 crc kubenswrapper[5081]: I1003 17:02:39.329325 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7524ea13cbe26519f80ee3d022ead4c30ca098ee015d0f98863202b72438e4e2" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.455871 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4298-account-create-2g4vp"] Oct 03 17:02:45 crc kubenswrapper[5081]: E1003 17:02:45.456902 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cdc9f9a-f530-454b-a477-cfdce21cf6b8" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.456920 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cdc9f9a-f530-454b-a477-cfdce21cf6b8" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: E1003 17:02:45.456932 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f17125-3b49-4560-aaab-3c9f0218d7a5" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.456939 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f17125-3b49-4560-aaab-3c9f0218d7a5" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: E1003 17:02:45.456970 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83c32c48-b8b0-418a-ac7a-e335d0564245" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.456976 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="83c32c48-b8b0-418a-ac7a-e335d0564245" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.457151 5081 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="14f17125-3b49-4560-aaab-3c9f0218d7a5" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.457169 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cdc9f9a-f530-454b-a477-cfdce21cf6b8" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.457177 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="83c32c48-b8b0-418a-ac7a-e335d0564245" containerName="mariadb-database-create" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.460131 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.466404 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.492798 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4298-account-create-2g4vp"] Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.549875 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cj6w\" (UniqueName: \"kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w\") pod \"nova-api-4298-account-create-2g4vp\" (UID: \"2a478b2d-4691-4c07-a388-81448d8d15d6\") " pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.653784 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cj6w\" (UniqueName: \"kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w\") pod \"nova-api-4298-account-create-2g4vp\" (UID: \"2a478b2d-4691-4c07-a388-81448d8d15d6\") " pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.661155 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-2fd7-account-create-frv4k"] Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.662538 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.664460 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.675494 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2fd7-account-create-frv4k"] Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.676484 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cj6w\" (UniqueName: \"kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w\") pod \"nova-api-4298-account-create-2g4vp\" (UID: \"2a478b2d-4691-4c07-a388-81448d8d15d6\") " pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.754066 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4098-account-create-tfpsk"] Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.755352 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfs59\" (UniqueName: \"kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59\") pod \"nova-cell0-2fd7-account-create-frv4k\" (UID: \"4ded1a9c-6511-4384-8d65-c32ad512869f\") " pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.755702 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.757595 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.765627 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4098-account-create-tfpsk"] Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.788534 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.857494 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfs59\" (UniqueName: \"kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59\") pod \"nova-cell0-2fd7-account-create-frv4k\" (UID: \"4ded1a9c-6511-4384-8d65-c32ad512869f\") " pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.857588 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlh5q\" (UniqueName: \"kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q\") pod \"nova-cell1-4098-account-create-tfpsk\" (UID: \"f93c25e6-31cd-46d1-bfe1-ebffac538dc7\") " pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.877653 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfs59\" (UniqueName: \"kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59\") pod \"nova-cell0-2fd7-account-create-frv4k\" (UID: \"4ded1a9c-6511-4384-8d65-c32ad512869f\") " pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.962538 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlh5q\" (UniqueName: \"kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q\") pod \"nova-cell1-4098-account-create-tfpsk\" (UID: \"f93c25e6-31cd-46d1-bfe1-ebffac538dc7\") " pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:45 crc kubenswrapper[5081]: I1003 17:02:45.982089 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlh5q\" (UniqueName: \"kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q\") pod \"nova-cell1-4098-account-create-tfpsk\" (UID: \"f93c25e6-31cd-46d1-bfe1-ebffac538dc7\") " pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.023352 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.077262 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.206474 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4298-account-create-2g4vp"] Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.388662 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4298-account-create-2g4vp" event={"ID":"2a478b2d-4691-4c07-a388-81448d8d15d6","Type":"ContainerStarted","Data":"51de9abb7ef9397598373fdc92eb8c9fdd35939b48610a411662406d880f5e02"} Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.449659 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2fd7-account-create-frv4k"] Oct 03 17:02:46 crc kubenswrapper[5081]: W1003 17:02:46.451750 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ded1a9c_6511_4384_8d65_c32ad512869f.slice/crio-f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9 WatchSource:0}: Error finding container f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9: Status 404 returned error can't find the container with id f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9 Oct 03 17:02:46 crc kubenswrapper[5081]: I1003 17:02:46.553323 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4098-account-create-tfpsk"] Oct 03 17:02:46 crc kubenswrapper[5081]: W1003 17:02:46.560499 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf93c25e6_31cd_46d1_bfe1_ebffac538dc7.slice/crio-c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee WatchSource:0}: Error finding container c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee: Status 404 returned error can't find the container with id c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.398349 5081 generic.go:334] "Generic (PLEG): container finished" podID="4ded1a9c-6511-4384-8d65-c32ad512869f" containerID="aa0ea7d9b2f2e300ca1223288b7b738553819976dfe93b1697e97b39f6e53190" exitCode=0 Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.398406 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2fd7-account-create-frv4k" event={"ID":"4ded1a9c-6511-4384-8d65-c32ad512869f","Type":"ContainerDied","Data":"aa0ea7d9b2f2e300ca1223288b7b738553819976dfe93b1697e97b39f6e53190"} Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.398461 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2fd7-account-create-frv4k" event={"ID":"4ded1a9c-6511-4384-8d65-c32ad512869f","Type":"ContainerStarted","Data":"f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9"} Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.400157 5081 generic.go:334] "Generic (PLEG): container finished" podID="f93c25e6-31cd-46d1-bfe1-ebffac538dc7" containerID="412d0795d8122ee3ff918dabc055d9da6a6d12bace8257bcb20797875dc4bf3e" exitCode=0 Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.400236 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4098-account-create-tfpsk" event={"ID":"f93c25e6-31cd-46d1-bfe1-ebffac538dc7","Type":"ContainerDied","Data":"412d0795d8122ee3ff918dabc055d9da6a6d12bace8257bcb20797875dc4bf3e"} Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.400253 5081 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4098-account-create-tfpsk" event={"ID":"f93c25e6-31cd-46d1-bfe1-ebffac538dc7","Type":"ContainerStarted","Data":"c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee"} Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.401608 5081 generic.go:334] "Generic (PLEG): container finished" podID="2a478b2d-4691-4c07-a388-81448d8d15d6" containerID="b3364c3692145944bdf73ffbf6be822036d712c4aefbe17ac4872c54b8e0f510" exitCode=0 Oct 03 17:02:47 crc kubenswrapper[5081]: I1003 17:02:47.401641 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4298-account-create-2g4vp" event={"ID":"2a478b2d-4691-4c07-a388-81448d8d15d6","Type":"ContainerDied","Data":"b3364c3692145944bdf73ffbf6be822036d712c4aefbe17ac4872c54b8e0f510"} Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.682547 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.804073 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.811493 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.818107 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfs59\" (UniqueName: \"kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59\") pod \"4ded1a9c-6511-4384-8d65-c32ad512869f\" (UID: \"4ded1a9c-6511-4384-8d65-c32ad512869f\") " Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.823572 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59" (OuterVolumeSpecName: "kube-api-access-dfs59") pod "4ded1a9c-6511-4384-8d65-c32ad512869f" (UID: "4ded1a9c-6511-4384-8d65-c32ad512869f"). InnerVolumeSpecName "kube-api-access-dfs59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.919605 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlh5q\" (UniqueName: \"kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q\") pod \"f93c25e6-31cd-46d1-bfe1-ebffac538dc7\" (UID: \"f93c25e6-31cd-46d1-bfe1-ebffac538dc7\") " Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.919726 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cj6w\" (UniqueName: \"kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w\") pod \"2a478b2d-4691-4c07-a388-81448d8d15d6\" (UID: \"2a478b2d-4691-4c07-a388-81448d8d15d6\") " Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.920486 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfs59\" (UniqueName: \"kubernetes.io/projected/4ded1a9c-6511-4384-8d65-c32ad512869f-kube-api-access-dfs59\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.922468 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q" (OuterVolumeSpecName: "kube-api-access-mlh5q") pod "f93c25e6-31cd-46d1-bfe1-ebffac538dc7" (UID: "f93c25e6-31cd-46d1-bfe1-ebffac538dc7"). InnerVolumeSpecName "kube-api-access-mlh5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:48 crc kubenswrapper[5081]: I1003 17:02:48.922773 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w" (OuterVolumeSpecName: "kube-api-access-9cj6w") pod "2a478b2d-4691-4c07-a388-81448d8d15d6" (UID: "2a478b2d-4691-4c07-a388-81448d8d15d6"). InnerVolumeSpecName "kube-api-access-9cj6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.022660 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlh5q\" (UniqueName: \"kubernetes.io/projected/f93c25e6-31cd-46d1-bfe1-ebffac538dc7-kube-api-access-mlh5q\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.022701 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cj6w\" (UniqueName: \"kubernetes.io/projected/2a478b2d-4691-4c07-a388-81448d8d15d6-kube-api-access-9cj6w\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.418863 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2fd7-account-create-frv4k" event={"ID":"4ded1a9c-6511-4384-8d65-c32ad512869f","Type":"ContainerDied","Data":"f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9"} Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.419188 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f848918e8b467ede731b23a6d0205c9361a96a934c0aee6ef9d31070704642e9" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.418882 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2fd7-account-create-frv4k" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.420307 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4098-account-create-tfpsk" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.420304 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4098-account-create-tfpsk" event={"ID":"f93c25e6-31cd-46d1-bfe1-ebffac538dc7","Type":"ContainerDied","Data":"c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee"} Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.420457 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4f6a2922c6123aef03c55bac6bb05b3d7986a61c7ded97c3e118a8a16ec45ee" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.421635 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4298-account-create-2g4vp" event={"ID":"2a478b2d-4691-4c07-a388-81448d8d15d6","Type":"ContainerDied","Data":"51de9abb7ef9397598373fdc92eb8c9fdd35939b48610a411662406d880f5e02"} Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.421670 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51de9abb7ef9397598373fdc92eb8c9fdd35939b48610a411662406d880f5e02" Oct 03 17:02:49 crc kubenswrapper[5081]: I1003 17:02:49.421678 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4298-account-create-2g4vp" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847135 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jzxch"] Oct 03 17:02:50 crc kubenswrapper[5081]: E1003 17:02:50.847495 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93c25e6-31cd-46d1-bfe1-ebffac538dc7" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847508 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93c25e6-31cd-46d1-bfe1-ebffac538dc7" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: E1003 17:02:50.847546 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a478b2d-4691-4c07-a388-81448d8d15d6" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847552 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a478b2d-4691-4c07-a388-81448d8d15d6" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: E1003 17:02:50.847577 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ded1a9c-6511-4384-8d65-c32ad512869f" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847583 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ded1a9c-6511-4384-8d65-c32ad512869f" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847745 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ded1a9c-6511-4384-8d65-c32ad512869f" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847764 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a478b2d-4691-4c07-a388-81448d8d15d6" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.847777 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f93c25e6-31cd-46d1-bfe1-ebffac538dc7" containerName="mariadb-account-create" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.848343 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.850878 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.850929 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-t5ltk" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.851097 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.866323 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jzxch"] Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.958524 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.958805 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.958931 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwzhk\" (UniqueName: \"kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:50 crc kubenswrapper[5081]: I1003 17:02:50.959081 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.060785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.061072 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.061707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwzhk\" (UniqueName: \"kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk\") pod \"nova-cell0-conductor-db-sync-jzxch\" 
(UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.061921 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.075638 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.076089 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.077280 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.078874 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwzhk\" (UniqueName: \"kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk\") pod \"nova-cell0-conductor-db-sync-jzxch\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.167173 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:51 crc kubenswrapper[5081]: I1003 17:02:51.582861 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jzxch"] Oct 03 17:02:52 crc kubenswrapper[5081]: I1003 17:02:52.446107 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jzxch" event={"ID":"be88c3fe-132e-482b-9d6f-f7c72ef66228","Type":"ContainerStarted","Data":"d244b260857b05e96f73a2f3b782fffecdd493d9c2191fa12e62c333aaeba24b"} Oct 03 17:02:52 crc kubenswrapper[5081]: I1003 17:02:52.446155 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jzxch" event={"ID":"be88c3fe-132e-482b-9d6f-f7c72ef66228","Type":"ContainerStarted","Data":"89db6910efbb8a01dac04db3b6bac42ee8d3a5c485bd7863683d741db4464f75"} Oct 03 17:02:52 crc kubenswrapper[5081]: I1003 17:02:52.465768 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jzxch" podStartSLOduration=2.465751239 podStartE2EDuration="2.465751239s" podCreationTimestamp="2025-10-03 17:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:02:52.456769251 +0000 UTC m=+5691.422325874" watchObservedRunningTime="2025-10-03 17:02:52.465751239 +0000 UTC m=+5691.431307852" Oct 03 17:02:57 crc kubenswrapper[5081]: I1003 17:02:57.514721 5081 generic.go:334] "Generic (PLEG): container finished" podID="be88c3fe-132e-482b-9d6f-f7c72ef66228" containerID="d244b260857b05e96f73a2f3b782fffecdd493d9c2191fa12e62c333aaeba24b" exitCode=0 Oct 03 17:02:57 crc kubenswrapper[5081]: I1003 17:02:57.515182 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jzxch" event={"ID":"be88c3fe-132e-482b-9d6f-f7c72ef66228","Type":"ContainerDied","Data":"d244b260857b05e96f73a2f3b782fffecdd493d9c2191fa12e62c333aaeba24b"} Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.838980 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.893853 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data\") pod \"be88c3fe-132e-482b-9d6f-f7c72ef66228\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.893972 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwzhk\" (UniqueName: \"kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk\") pod \"be88c3fe-132e-482b-9d6f-f7c72ef66228\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.894064 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle\") pod \"be88c3fe-132e-482b-9d6f-f7c72ef66228\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.894113 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts\") pod \"be88c3fe-132e-482b-9d6f-f7c72ef66228\" (UID: \"be88c3fe-132e-482b-9d6f-f7c72ef66228\") " Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.906985 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts" (OuterVolumeSpecName: "scripts") pod "be88c3fe-132e-482b-9d6f-f7c72ef66228" (UID: "be88c3fe-132e-482b-9d6f-f7c72ef66228"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.908321 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk" (OuterVolumeSpecName: "kube-api-access-gwzhk") pod "be88c3fe-132e-482b-9d6f-f7c72ef66228" (UID: "be88c3fe-132e-482b-9d6f-f7c72ef66228"). InnerVolumeSpecName "kube-api-access-gwzhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.922414 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be88c3fe-132e-482b-9d6f-f7c72ef66228" (UID: "be88c3fe-132e-482b-9d6f-f7c72ef66228"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.924249 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data" (OuterVolumeSpecName: "config-data") pod "be88c3fe-132e-482b-9d6f-f7c72ef66228" (UID: "be88c3fe-132e-482b-9d6f-f7c72ef66228"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.996674 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.996942 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwzhk\" (UniqueName: \"kubernetes.io/projected/be88c3fe-132e-482b-9d6f-f7c72ef66228-kube-api-access-gwzhk\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.997002 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:58 crc kubenswrapper[5081]: I1003 17:02:58.997055 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be88c3fe-132e-482b-9d6f-f7c72ef66228-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.532645 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jzxch" event={"ID":"be88c3fe-132e-482b-9d6f-f7c72ef66228","Type":"ContainerDied","Data":"89db6910efbb8a01dac04db3b6bac42ee8d3a5c485bd7863683d741db4464f75"} Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.532689 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89db6910efbb8a01dac04db3b6bac42ee8d3a5c485bd7863683d741db4464f75" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.532750 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jzxch" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.604365 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 17:02:59 crc kubenswrapper[5081]: E1003 17:02:59.605273 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be88c3fe-132e-482b-9d6f-f7c72ef66228" containerName="nova-cell0-conductor-db-sync" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.605385 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="be88c3fe-132e-482b-9d6f-f7c72ef66228" containerName="nova-cell0-conductor-db-sync" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.605795 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="be88c3fe-132e-482b-9d6f-f7c72ef66228" containerName="nova-cell0-conductor-db-sync" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.606954 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.609487 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.609749 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-t5ltk" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.614377 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.708535 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.709224 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm8bg\" (UniqueName: \"kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.709307 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.811470 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.811576 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm8bg\" (UniqueName: \"kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.811637 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.818872 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.818912 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.830868 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm8bg\" (UniqueName: \"kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg\") pod \"nova-cell0-conductor-0\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") " pod="openstack/nova-cell0-conductor-0" Oct 03 17:02:59 crc kubenswrapper[5081]: I1003 17:02:59.935169 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.383096 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.543969 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0d01708d-75cb-411e-95ac-d3458148063e","Type":"ContainerStarted","Data":"3752891d8a21c3515c1b4aea73a92f0cf750d6d3a930dd9e5d9c77d1f9bcab04"} Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.647298 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.647360 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.647412 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.648186 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:03:00 crc kubenswrapper[5081]: I1003 17:03:00.648260 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" gracePeriod=600 Oct 03 17:03:00 crc kubenswrapper[5081]: E1003 17:03:00.779479 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.552969 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"0d01708d-75cb-411e-95ac-d3458148063e","Type":"ContainerStarted","Data":"5b412de9f22f243cc157c9c6bb27e1610b23d48c125e4ebd5c76d4cb4bd4fb96"} Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.555554 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.558036 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" exitCode=0 Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.558200 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"} Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.558313 5081 scope.go:117] "RemoveContainer" containerID="8921388ca71753ab3c7b893bdb2f477facaefffa8aca97d42c54c24634476e41" Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.559241 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:03:01 crc kubenswrapper[5081]: E1003 17:03:01.559621 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:03:01 crc kubenswrapper[5081]: I1003 17:03:01.580918 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.580896675 podStartE2EDuration="2.580896675s" podCreationTimestamp="2025-10-03 17:02:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:01.577096526 +0000 UTC m=+5700.542653159" watchObservedRunningTime="2025-10-03 17:03:01.580896675 +0000 UTC m=+5700.546453288" Oct 03 17:03:09 crc kubenswrapper[5081]: I1003 17:03:09.958398 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.440848 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-78n8t"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.442306 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.444362 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.444873 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.452474 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-78n8t"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.496634 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.496756 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.496784 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cf25\" (UniqueName: \"kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.496870 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.581327 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.583088 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.587986 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.598004 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.598739 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.598785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cf25\" (UniqueName: \"kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.598840 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.598967 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.600827 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.609886 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.610415 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.611842 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.621624 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.635443 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cf25\" (UniqueName: \"kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.639735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-78n8t\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.653153 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700518 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z967\" (UniqueName: \"kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700600 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4brdb\" (UniqueName: \"kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700635 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700654 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.700675 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.728370 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.730119 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.735068 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.748716 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.750427 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.757294 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.764538 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.781642 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.799092 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802160 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802267 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802363 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prf7v\" (UniqueName: \"kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802436 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802509 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802588 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7wvh\" (UniqueName: \"kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802632 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802675 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802695 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802749 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802768 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z967\" (UniqueName: \"kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802860 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4brdb\" (UniqueName: \"kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.802922 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.819728 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.830964 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.831279 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.831487 5081 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-4brdb\" (UniqueName: \"kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.831817 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.833113 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z967\" (UniqueName: \"kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967\") pod \"nova-api-0\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " pod="openstack/nova-api-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.868989 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.871065 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.873256 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904010 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904063 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904114 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prf7v\" (UniqueName: \"kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904142 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904163 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904205 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904244 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7wvh\" (UniqueName: \"kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904297 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904332 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904363 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qj2t\" (UniqueName: \"kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904396 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.904423 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.912521 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.914009 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.914385 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.924471 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.928539 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.929149 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.930787 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prf7v\" (UniqueName: \"kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v\") pod \"nova-scheduler-0\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:10 crc kubenswrapper[5081]: I1003 17:03:10.935172 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7wvh\" (UniqueName: \"kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh\") pod \"nova-metadata-0\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " pod="openstack/nova-metadata-0" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.006074 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qj2t\" (UniqueName: \"kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.006134 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.006195 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.006223 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.006263 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.007421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.007595 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.008092 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.008273 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.025312 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qj2t\" (UniqueName: \"kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t\") pod \"dnsmasq-dns-5695b4b4c5-xj44x\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.029201 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.052027 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.081815 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.210717 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.310022 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-78n8t"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.482454 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:03:11 crc kubenswrapper[5081]: W1003 17:03:11.493148 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c45a8eb_08c5_4a88_ae2f_835953e14a86.slice/crio-4a141bf3c8a93264f993466718b9af4e104f9e441ae51c19eae431486ba3f5b9 WatchSource:0}: Error finding container 4a141bf3c8a93264f993466718b9af4e104f9e441ae51c19eae431486ba3f5b9: Status 404 returned error can't find the container with id 4a141bf3c8a93264f993466718b9af4e104f9e441ae51c19eae431486ba3f5b9 Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.625780 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r4922"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.627148 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.629997 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.630223 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.637857 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.653506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r4922"] Oct 03 17:03:11 crc kubenswrapper[5081]: W1003 17:03:11.655017 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa6a909e_8cb5_4016_bc91_f5bcd8859736.slice/crio-6329afa4181db48e08c5ed0fc05943ba7413cf2ca8b16fbb9c90f460141168ad WatchSource:0}: Error finding container 6329afa4181db48e08c5ed0fc05943ba7413cf2ca8b16fbb9c90f460141168ad: Status 404 returned error can't find the container with id 6329afa4181db48e08c5ed0fc05943ba7413cf2ca8b16fbb9c90f460141168ad Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.686221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7c45a8eb-08c5-4a88-ae2f-835953e14a86","Type":"ContainerStarted","Data":"4a141bf3c8a93264f993466718b9af4e104f9e441ae51c19eae431486ba3f5b9"} Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.687479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerStarted","Data":"6329afa4181db48e08c5ed0fc05943ba7413cf2ca8b16fbb9c90f460141168ad"} Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.689475 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-78n8t" event={"ID":"1802be7f-0d59-41b3-8e2d-ec4abf6a221a","Type":"ContainerStarted","Data":"52ca2cf3864549b0fa6d1ef8a3c2cdee015fef36d3283a6344bf6e0b550c9f6d"} Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.689496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-cell-mapping-78n8t" event={"ID":"1802be7f-0d59-41b3-8e2d-ec4abf6a221a","Type":"ContainerStarted","Data":"cadae07470bbd6c9cca4e0d0ee4a2125c0b768968b6fa2a4ee002f0fa218feee"} Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.718222 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.719341 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.719618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.719781 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6xq2\" (UniqueName: \"kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.719943 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.723791 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-78n8t" podStartSLOduration=1.72376931 podStartE2EDuration="1.72376931s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:11.705739121 +0000 UTC m=+5710.671295754" watchObservedRunningTime="2025-10-03 17:03:11.72376931 +0000 UTC m=+5710.689325913" Oct 03 17:03:11 crc kubenswrapper[5081]: W1003 17:03:11.724856 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bbe5cc8_1f85_4de2_b3eb_31d203f6c1d7.slice/crio-9e4739ad83e5c2a9f7bfbbdeee69f72d06429845b3e32df69dda1e85ac37a6c1 WatchSource:0}: Error finding container 9e4739ad83e5c2a9f7bfbbdeee69f72d06429845b3e32df69dda1e85ac37a6c1: Status 404 returned error can't find the container with id 9e4739ad83e5c2a9f7bfbbdeee69f72d06429845b3e32df69dda1e85ac37a6c1 Oct 03 17:03:11 crc kubenswrapper[5081]: W1003 17:03:11.727598 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdb9ce75_5626_41ef_b98b_f07e5f5d44c1.slice/crio-b05e98add01bae36fb506494d7b7b49d60c5d056c5187b0e99f770ce62938439 WatchSource:0}: Error finding container b05e98add01bae36fb506494d7b7b49d60c5d056c5187b0e99f770ce62938439: 
Status 404 returned error can't find the container with id b05e98add01bae36fb506494d7b7b49d60c5d056c5187b0e99f770ce62938439 Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.736372 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.822486 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.822588 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6xq2\" (UniqueName: \"kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.822641 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.822761 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.827181 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.827178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.844242 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.862371 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6xq2\" (UniqueName: \"kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2\") pod \"nova-cell1-conductor-db-sync-r4922\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.863938 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:03:11 crc kubenswrapper[5081]: I1003 17:03:11.986023 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:12 crc kubenswrapper[5081]: W1003 17:03:12.494032 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16d3f66b_7d30_4e2e_a731_bb11a58a3cf6.slice/crio-57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2 WatchSource:0}: Error finding container 57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2: Status 404 returned error can't find the container with id 57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2 Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.498965 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r4922"] Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.700373 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7c45a8eb-08c5-4a88-ae2f-835953e14a86","Type":"ContainerStarted","Data":"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.702599 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerStarted","Data":"af1000f55e658d9d3ef7e3d430e4067d2b6f2ca1a44d97459f4bf149827fb6e0"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.702636 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerStarted","Data":"848953d238f641249a125730dbbc4ea5aabc8aa7b662f99f495dadf4db8a6c99"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.702650 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerStarted","Data":"b05e98add01bae36fb506494d7b7b49d60c5d056c5187b0e99f770ce62938439"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.705496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerStarted","Data":"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.705793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerStarted","Data":"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.714938 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r4922" event={"ID":"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6","Type":"ContainerStarted","Data":"934fae996577b5e325bc12e7ea71af5517c95424d14cdc78d3d2efec358c1451"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.715356 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r4922" event={"ID":"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6","Type":"ContainerStarted","Data":"57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.722254 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7","Type":"ContainerStarted","Data":"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.722652 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7","Type":"ContainerStarted","Data":"9e4739ad83e5c2a9f7bfbbdeee69f72d06429845b3e32df69dda1e85ac37a6c1"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.725525 5081 generic.go:334] "Generic (PLEG): container finished" podID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerID="745dc5a34c89a4f8129635bff13802ff9a323f772c53a1c3c46d070d22b6cdca" exitCode=0 Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.725988 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" event={"ID":"ab9407e5-195d-4ddb-ae06-562042ba3db9","Type":"ContainerDied","Data":"745dc5a34c89a4f8129635bff13802ff9a323f772c53a1c3c46d070d22b6cdca"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.726059 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" event={"ID":"ab9407e5-195d-4ddb-ae06-562042ba3db9","Type":"ContainerStarted","Data":"db6b2e013afd79c00ba4239646d083da40a897ebb1a22801b28d35eee40dd314"} Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.733284 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.733264019 podStartE2EDuration="2.733264019s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:12.721870552 +0000 UTC m=+5711.687427185" watchObservedRunningTime="2025-10-03 17:03:12.733264019 +0000 UTC m=+5711.698820642" Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.746243 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.7462252019999998 podStartE2EDuration="2.746225202s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:12.741996911 +0000 UTC m=+5711.707553534" watchObservedRunningTime="2025-10-03 17:03:12.746225202 +0000 UTC m=+5711.711781815" Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.763728 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.763707515 podStartE2EDuration="2.763707515s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:12.761937064 +0000 UTC m=+5711.727493677" watchObservedRunningTime="2025-10-03 17:03:12.763707515 +0000 UTC m=+5711.729264158" Oct 03 17:03:12 crc kubenswrapper[5081]: I1003 17:03:12.781517 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-r4922" podStartSLOduration=1.7815010070000001 podStartE2EDuration="1.781501007s" podCreationTimestamp="2025-10-03 17:03:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:12.778377227 +0000 UTC m=+5711.743933840" watchObservedRunningTime="2025-10-03 
17:03:12.781501007 +0000 UTC m=+5711.747057620" Oct 03 17:03:13 crc kubenswrapper[5081]: I1003 17:03:13.747972 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" event={"ID":"ab9407e5-195d-4ddb-ae06-562042ba3db9","Type":"ContainerStarted","Data":"df65aa4156924ed08fb35d83c4356c742dfacd73f8d051635bc6a395ddb26fbd"} Oct 03 17:03:13 crc kubenswrapper[5081]: I1003 17:03:13.777616 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" podStartSLOduration=3.777587302 podStartE2EDuration="3.777587302s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:13.773180055 +0000 UTC m=+5712.738736688" watchObservedRunningTime="2025-10-03 17:03:13.777587302 +0000 UTC m=+5712.743143925" Oct 03 17:03:13 crc kubenswrapper[5081]: I1003 17:03:13.787404 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.787386244 podStartE2EDuration="3.787386244s" podCreationTimestamp="2025-10-03 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:12.822948109 +0000 UTC m=+5711.788504732" watchObservedRunningTime="2025-10-03 17:03:13.787386244 +0000 UTC m=+5712.752942857" Oct 03 17:03:14 crc kubenswrapper[5081]: I1003 17:03:14.762173 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:15 crc kubenswrapper[5081]: I1003 17:03:15.773372 5081 generic.go:334] "Generic (PLEG): container finished" podID="16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" containerID="934fae996577b5e325bc12e7ea71af5517c95424d14cdc78d3d2efec358c1451" exitCode=0 Oct 03 17:03:15 crc kubenswrapper[5081]: I1003 17:03:15.773481 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r4922" event={"ID":"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6","Type":"ContainerDied","Data":"934fae996577b5e325bc12e7ea71af5517c95424d14cdc78d3d2efec358c1451"} Oct 03 17:03:15 crc kubenswrapper[5081]: I1003 17:03:15.915489 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.052260 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.082867 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.082916 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.783283 5081 generic.go:334] "Generic (PLEG): container finished" podID="1802be7f-0d59-41b3-8e2d-ec4abf6a221a" containerID="52ca2cf3864549b0fa6d1ef8a3c2cdee015fef36d3283a6344bf6e0b550c9f6d" exitCode=0 Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.783391 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-78n8t" event={"ID":"1802be7f-0d59-41b3-8e2d-ec4abf6a221a","Type":"ContainerDied","Data":"52ca2cf3864549b0fa6d1ef8a3c2cdee015fef36d3283a6344bf6e0b550c9f6d"} Oct 03 17:03:16 crc kubenswrapper[5081]: I1003 17:03:16.827643 5081 
scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:03:16 crc kubenswrapper[5081]: E1003 17:03:16.827887 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.091718 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.234272 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle\") pod \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.234408 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts\") pod \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.234524 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data\") pod \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.235095 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6xq2\" (UniqueName: \"kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2\") pod \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\" (UID: \"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6\") " Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.239225 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2" (OuterVolumeSpecName: "kube-api-access-w6xq2") pod "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" (UID: "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6"). InnerVolumeSpecName "kube-api-access-w6xq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.239199 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts" (OuterVolumeSpecName: "scripts") pod "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" (UID: "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.258914 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data" (OuterVolumeSpecName: "config-data") pod "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" (UID: "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.261146 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" (UID: "16d3f66b-7d30-4e2e-a731-bb11a58a3cf6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.337760 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.337967 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6xq2\" (UniqueName: \"kubernetes.io/projected/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-kube-api-access-w6xq2\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.338065 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.338119 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.792715 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r4922" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.792829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r4922" event={"ID":"16d3f66b-7d30-4e2e-a731-bb11a58a3cf6","Type":"ContainerDied","Data":"57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2"} Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.794458 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57d1717fe1d3582bc1664c5c812115e7c956fc7569c4d71829710f8c449369e2" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.869776 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:03:17 crc kubenswrapper[5081]: E1003 17:03:17.870192 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" containerName="nova-cell1-conductor-db-sync" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.870215 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" containerName="nova-cell1-conductor-db-sync" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.870402 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" containerName="nova-cell1-conductor-db-sync" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.871066 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.881783 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 17:03:17 crc kubenswrapper[5081]: I1003 17:03:17.899519 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.051780 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.052058 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.052202 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5rxp\" (UniqueName: \"kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.154449 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5rxp\" (UniqueName: \"kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.154534 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.154606 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.155956 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.160506 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.160645 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.173373 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5rxp\" (UniqueName: \"kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp\") pod \"nova-cell1-conductor-0\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.199586 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.359140 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts\") pod \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.360228 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cf25\" (UniqueName: \"kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25\") pod \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.360500 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle\") pod \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.360660 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data\") pod \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\" (UID: \"1802be7f-0d59-41b3-8e2d-ec4abf6a221a\") " Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.366656 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25" (OuterVolumeSpecName: "kube-api-access-8cf25") pod "1802be7f-0d59-41b3-8e2d-ec4abf6a221a" (UID: "1802be7f-0d59-41b3-8e2d-ec4abf6a221a"). InnerVolumeSpecName "kube-api-access-8cf25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.366813 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts" (OuterVolumeSpecName: "scripts") pod "1802be7f-0d59-41b3-8e2d-ec4abf6a221a" (UID: "1802be7f-0d59-41b3-8e2d-ec4abf6a221a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.387081 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1802be7f-0d59-41b3-8e2d-ec4abf6a221a" (UID: "1802be7f-0d59-41b3-8e2d-ec4abf6a221a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.418736 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data" (OuterVolumeSpecName: "config-data") pod "1802be7f-0d59-41b3-8e2d-ec4abf6a221a" (UID: "1802be7f-0d59-41b3-8e2d-ec4abf6a221a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.463698 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.463734 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.463743 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cf25\" (UniqueName: \"kubernetes.io/projected/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-kube-api-access-8cf25\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.463754 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1802be7f-0d59-41b3-8e2d-ec4abf6a221a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.647491 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:03:18 crc kubenswrapper[5081]: W1003 17:03:18.648502 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31cc3fb5_c219_470b_8a49_c1b1ae4c9cd2.slice/crio-697881178f0faac238656abe591bdf1347af5e0fd94cd040c624876937068a37 WatchSource:0}: Error finding container 697881178f0faac238656abe591bdf1347af5e0fd94cd040c624876937068a37: Status 404 returned error can't find the container with id 697881178f0faac238656abe591bdf1347af5e0fd94cd040c624876937068a37 Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.806246 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2","Type":"ContainerStarted","Data":"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23"} Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.806296 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-conductor-0" event={"ID":"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2","Type":"ContainerStarted","Data":"697881178f0faac238656abe591bdf1347af5e0fd94cd040c624876937068a37"} Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.806356 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.808073 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-78n8t" event={"ID":"1802be7f-0d59-41b3-8e2d-ec4abf6a221a","Type":"ContainerDied","Data":"cadae07470bbd6c9cca4e0d0ee4a2125c0b768968b6fa2a4ee002f0fa218feee"} Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.808111 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cadae07470bbd6c9cca4e0d0ee4a2125c0b768968b6fa2a4ee002f0fa218feee" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.808183 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-78n8t" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.829275 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.829250954 podStartE2EDuration="1.829250954s" podCreationTimestamp="2025-10-03 17:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:18.823089397 +0000 UTC m=+5717.788646020" watchObservedRunningTime="2025-10-03 17:03:18.829250954 +0000 UTC m=+5717.794807567" Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.978428 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.978821 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-log" containerID="cri-o://0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" gracePeriod=30 Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.978986 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-api" containerID="cri-o://dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" gracePeriod=30 Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.992904 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:18 crc kubenswrapper[5081]: I1003 17:03:18.993189 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" containerName="nova-scheduler-scheduler" containerID="cri-o://15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084" gracePeriod=30 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.049297 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.049530 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-log" containerID="cri-o://848953d238f641249a125730dbbc4ea5aabc8aa7b662f99f495dadf4db8a6c99" gracePeriod=30 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 
17:03:19.049636 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-metadata" containerID="cri-o://af1000f55e658d9d3ef7e3d430e4067d2b6f2ca1a44d97459f4bf149827fb6e0" gracePeriod=30 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.658818 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.787989 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle\") pod \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.788053 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data\") pod \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.788181 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs\") pod \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.788238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z967\" (UniqueName: \"kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967\") pod \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\" (UID: \"fa6a909e-8cb5-4016-bc91-f5bcd8859736\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.788754 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs" (OuterVolumeSpecName: "logs") pod "fa6a909e-8cb5-4016-bc91-f5bcd8859736" (UID: "fa6a909e-8cb5-4016-bc91-f5bcd8859736"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.792856 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967" (OuterVolumeSpecName: "kube-api-access-7z967") pod "fa6a909e-8cb5-4016-bc91-f5bcd8859736" (UID: "fa6a909e-8cb5-4016-bc91-f5bcd8859736"). InnerVolumeSpecName "kube-api-access-7z967". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.815071 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data" (OuterVolumeSpecName: "config-data") pod "fa6a909e-8cb5-4016-bc91-f5bcd8859736" (UID: "fa6a909e-8cb5-4016-bc91-f5bcd8859736"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.817545 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa6a909e-8cb5-4016-bc91-f5bcd8859736" (UID: "fa6a909e-8cb5-4016-bc91-f5bcd8859736"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.819118 5081 generic.go:334] "Generic (PLEG): container finished" podID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerID="af1000f55e658d9d3ef7e3d430e4067d2b6f2ca1a44d97459f4bf149827fb6e0" exitCode=0 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.819148 5081 generic.go:334] "Generic (PLEG): container finished" podID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerID="848953d238f641249a125730dbbc4ea5aabc8aa7b662f99f495dadf4db8a6c99" exitCode=143 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.819190 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerDied","Data":"af1000f55e658d9d3ef7e3d430e4067d2b6f2ca1a44d97459f4bf149827fb6e0"} Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.819222 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerDied","Data":"848953d238f641249a125730dbbc4ea5aabc8aa7b662f99f495dadf4db8a6c99"} Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820852 5081 generic.go:334] "Generic (PLEG): container finished" podID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerID="dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" exitCode=0 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820867 5081 generic.go:334] "Generic (PLEG): container finished" podID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerID="0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" exitCode=143 Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerDied","Data":"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e"} Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820958 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820980 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerDied","Data":"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461"} Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.820997 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fa6a909e-8cb5-4016-bc91-f5bcd8859736","Type":"ContainerDied","Data":"6329afa4181db48e08c5ed0fc05943ba7413cf2ca8b16fbb9c90f460141168ad"} Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.821036 5081 scope.go:117] "RemoveContainer" containerID="dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.845755 5081 scope.go:117] "RemoveContainer" containerID="0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.856944 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.884625 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.888746 5081 scope.go:117] "RemoveContainer" containerID="dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.890398 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.890451 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa6a909e-8cb5-4016-bc91-f5bcd8859736-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.890460 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa6a909e-8cb5-4016-bc91-f5bcd8859736-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.890469 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z967\" (UniqueName: \"kubernetes.io/projected/fa6a909e-8cb5-4016-bc91-f5bcd8859736-kube-api-access-7z967\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.890542 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:19 crc kubenswrapper[5081]: E1003 17:03:19.890989 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-log" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891010 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-log" Oct 03 17:03:19 crc kubenswrapper[5081]: E1003 17:03:19.891026 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1802be7f-0d59-41b3-8e2d-ec4abf6a221a" containerName="nova-manage" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891032 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1802be7f-0d59-41b3-8e2d-ec4abf6a221a" containerName="nova-manage" Oct 03 17:03:19 crc kubenswrapper[5081]: E1003 17:03:19.891044 5081 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-api" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891050 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-api" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891241 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-api" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891255 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" containerName="nova-api-log" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.891298 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1802be7f-0d59-41b3-8e2d-ec4abf6a221a" containerName="nova-manage" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.892534 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: E1003 17:03:19.893011 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e\": container with ID starting with dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e not found: ID does not exist" containerID="dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.893054 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e"} err="failed to get container status \"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e\": rpc error: code = NotFound desc = could not find container \"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e\": container with ID starting with dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e not found: ID does not exist" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.893122 5081 scope.go:117] "RemoveContainer" containerID="0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" Oct 03 17:03:19 crc kubenswrapper[5081]: E1003 17:03:19.894938 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461\": container with ID starting with 0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461 not found: ID does not exist" containerID="0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.894968 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461"} err="failed to get container status \"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461\": rpc error: code = NotFound desc = could not find container \"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461\": container with ID starting with 0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461 not found: ID does not exist" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.895025 5081 scope.go:117] "RemoveContainer" containerID="dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e" Oct 03 
17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.895191 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.895461 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e"} err="failed to get container status \"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e\": rpc error: code = NotFound desc = could not find container \"dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e\": container with ID starting with dc363ae542946ff22f03ab0a255d7dfeb594711e17c0a4c1e1ce033b50f3260e not found: ID does not exist" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.895503 5081 scope.go:117] "RemoveContainer" containerID="0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.895800 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461"} err="failed to get container status \"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461\": rpc error: code = NotFound desc = could not find container \"0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461\": container with ID starting with 0ad83e5cea73c0cf7eab421a049a7b9463e8cafa3fa64b824e74972f11ef9461 not found: ID does not exist" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.905023 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.928228 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.991896 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7wvh\" (UniqueName: \"kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh\") pod \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992011 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle\") pod \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992126 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data\") pod \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs\") pod \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\" (UID: \"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1\") " Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992589 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs" (OuterVolumeSpecName: "logs") pod "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" (UID: "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992629 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n44gb\" (UniqueName: \"kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992736 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992785 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.992999 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:19 crc kubenswrapper[5081]: I1003 17:03:19.995974 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh" (OuterVolumeSpecName: "kube-api-access-w7wvh") pod "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" (UID: "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1"). InnerVolumeSpecName "kube-api-access-w7wvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.017242 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" (UID: "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.018336 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data" (OuterVolumeSpecName: "config-data") pod "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" (UID: "cdb9ce75-5626-41ef-b98b-f07e5f5d44c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.093781 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.093859 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094002 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094097 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n44gb\" (UniqueName: \"kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094195 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094211 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094221 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7wvh\" (UniqueName: \"kubernetes.io/projected/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1-kube-api-access-w7wvh\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.094269 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.097811 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.098752 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.111215 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n44gb\" (UniqueName: \"kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb\") pod 
\"nova-api-0\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.217515 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.631173 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:20 crc kubenswrapper[5081]: W1003 17:03:20.641753 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d707dea_6e10_4f0c_a4f0_4cec8ed648c6.slice/crio-1a1d5ee6eee2763e90573be3f76faba6fb99137b9d123a45a69c07c90ead2d17 WatchSource:0}: Error finding container 1a1d5ee6eee2763e90573be3f76faba6fb99137b9d123a45a69c07c90ead2d17: Status 404 returned error can't find the container with id 1a1d5ee6eee2763e90573be3f76faba6fb99137b9d123a45a69c07c90ead2d17 Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.830193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerStarted","Data":"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde"} Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.830526 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerStarted","Data":"1a1d5ee6eee2763e90573be3f76faba6fb99137b9d123a45a69c07c90ead2d17"} Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.831597 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cdb9ce75-5626-41ef-b98b-f07e5f5d44c1","Type":"ContainerDied","Data":"b05e98add01bae36fb506494d7b7b49d60c5d056c5187b0e99f770ce62938439"} Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.831627 5081 scope.go:117] "RemoveContainer" containerID="af1000f55e658d9d3ef7e3d430e4067d2b6f2ca1a44d97459f4bf149827fb6e0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.831706 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.866209 5081 scope.go:117] "RemoveContainer" containerID="848953d238f641249a125730dbbc4ea5aabc8aa7b662f99f495dadf4db8a6c99" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.877395 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.889134 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.897000 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:20 crc kubenswrapper[5081]: E1003 17:03:20.897483 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-metadata" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.897499 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-metadata" Oct 03 17:03:20 crc kubenswrapper[5081]: E1003 17:03:20.897515 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-log" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.897524 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-log" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.897768 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-log" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.897794 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" containerName="nova-metadata-metadata" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.899038 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.901145 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.909928 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.914833 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5ndx\" (UniqueName: \"kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.915194 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.915419 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.915492 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.915666 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:20 crc kubenswrapper[5081]: I1003 17:03:20.928102 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.017427 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5ndx\" (UniqueName: \"kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.017532 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.017578 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.017635 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.018351 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.021178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.023265 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.034678 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5ndx\" (UniqueName: \"kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx\") pod \"nova-metadata-0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.212719 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.219358 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.278145 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"] Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.278363 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="dnsmasq-dns" containerID="cri-o://aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099" gracePeriod=10 Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.753328 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:21 crc kubenswrapper[5081]: W1003 17:03:21.757730 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae9100b1_1555_4822_90c4_c05bc6cf97b0.slice/crio-15343f98322a184700ed1c748fffd175460ee4d851426e086ed8fd6d722ab1f1 WatchSource:0}: Error finding container 15343f98322a184700ed1c748fffd175460ee4d851426e086ed8fd6d722ab1f1: Status 404 returned error can't find the container with id 15343f98322a184700ed1c748fffd175460ee4d851426e086ed8fd6d722ab1f1 Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.800812 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.830544 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddpdv\" (UniqueName: \"kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv\") pod \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.830621 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc\") pod \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.830660 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb\") pod \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.830816 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config\") pod \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.830853 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb\") pod \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\" (UID: \"eb56f1bb-3606-4cfc-b803-07e89f3dffaf\") " Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.837034 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv" (OuterVolumeSpecName: "kube-api-access-ddpdv") pod "eb56f1bb-3606-4cfc-b803-07e89f3dffaf" (UID: "eb56f1bb-3606-4cfc-b803-07e89f3dffaf"). InnerVolumeSpecName "kube-api-access-ddpdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.858527 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerID="aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099" exitCode=0 Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.861724 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.862947 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdb9ce75-5626-41ef-b98b-f07e5f5d44c1" path="/var/lib/kubelet/pods/cdb9ce75-5626-41ef-b98b-f07e5f5d44c1/volumes" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.864171 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa6a909e-8cb5-4016-bc91-f5bcd8859736" path="/var/lib/kubelet/pods/fa6a909e-8cb5-4016-bc91-f5bcd8859736/volumes" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.871894 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerStarted","Data":"15343f98322a184700ed1c748fffd175460ee4d851426e086ed8fd6d722ab1f1"} Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.871945 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" event={"ID":"eb56f1bb-3606-4cfc-b803-07e89f3dffaf","Type":"ContainerDied","Data":"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099"} Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.871970 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d454cf57-bfrxw" event={"ID":"eb56f1bb-3606-4cfc-b803-07e89f3dffaf","Type":"ContainerDied","Data":"7863f7a20aa1d4dd258d5c5ac6a18502bd0778d1f078a9264c0b8afd72407ef7"} Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.871989 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerStarted","Data":"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9"} Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.872022 5081 scope.go:117] "RemoveContainer" containerID="aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.891839 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.916547 5081 scope.go:117] "RemoveContainer" containerID="3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.922045 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb56f1bb-3606-4cfc-b803-07e89f3dffaf" (UID: "eb56f1bb-3606-4cfc-b803-07e89f3dffaf"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.940417 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddpdv\" (UniqueName: \"kubernetes.io/projected/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-kube-api-access-ddpdv\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.940696 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.950262 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eb56f1bb-3606-4cfc-b803-07e89f3dffaf" (UID: "eb56f1bb-3606-4cfc-b803-07e89f3dffaf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.953766 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config" (OuterVolumeSpecName: "config") pod "eb56f1bb-3606-4cfc-b803-07e89f3dffaf" (UID: "eb56f1bb-3606-4cfc-b803-07e89f3dffaf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.960630 5081 scope.go:117] "RemoveContainer" containerID="aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099" Oct 03 17:03:21 crc kubenswrapper[5081]: E1003 17:03:21.961544 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099\": container with ID starting with aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099 not found: ID does not exist" containerID="aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.961605 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099"} err="failed to get container status \"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099\": rpc error: code = NotFound desc = could not find container \"aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099\": container with ID starting with aac6d0e54c42c9ea4619cf7e038656a5b5915cee893af31bf46bb6811cd4a099 not found: ID does not exist" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.961633 5081 scope.go:117] "RemoveContainer" containerID="3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.967991 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb56f1bb-3606-4cfc-b803-07e89f3dffaf" (UID: "eb56f1bb-3606-4cfc-b803-07e89f3dffaf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:03:21 crc kubenswrapper[5081]: E1003 17:03:21.970707 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6\": container with ID starting with 3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6 not found: ID does not exist" containerID="3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.970763 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6"} err="failed to get container status \"3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6\": rpc error: code = NotFound desc = could not find container \"3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6\": container with ID starting with 3b0d7180f9cc3f3c5e07c9d21639ed80f64dcccf90c2f21efeabfce25e5675b6 not found: ID does not exist" Oct 03 17:03:21 crc kubenswrapper[5081]: I1003 17:03:21.972614 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.972526877 podStartE2EDuration="2.972526877s" podCreationTimestamp="2025-10-03 17:03:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:21.958451182 +0000 UTC m=+5720.924007795" watchObservedRunningTime="2025-10-03 17:03:21.972526877 +0000 UTC m=+5720.938083490" Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.042223 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.042261 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.042275 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb56f1bb-3606-4cfc-b803-07e89f3dffaf-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.198690 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"] Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.207225 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d454cf57-bfrxw"] Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.871063 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerStarted","Data":"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c"} Oct 03 17:03:22 crc kubenswrapper[5081]: I1003 17:03:22.871352 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerStarted","Data":"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256"} Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.226235 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 03 
17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.255224 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.255205565 podStartE2EDuration="3.255205565s" podCreationTimestamp="2025-10-03 17:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:22.894349765 +0000 UTC m=+5721.859906398" watchObservedRunningTime="2025-10-03 17:03:23.255205565 +0000 UTC m=+5722.220762178" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.725325 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-4dpj8"] Oct 03 17:03:23 crc kubenswrapper[5081]: E1003 17:03:23.725841 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="dnsmasq-dns" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.725866 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="dnsmasq-dns" Oct 03 17:03:23 crc kubenswrapper[5081]: E1003 17:03:23.725898 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="init" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.725907 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="init" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.726115 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" containerName="dnsmasq-dns" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.726987 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.728815 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.730941 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.736315 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4dpj8"] Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.760375 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.774919 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle\") pod \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775028 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prf7v\" (UniqueName: \"kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v\") pod \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775082 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data\") pod \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\" (UID: \"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7\") " Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775343 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44s6g\" (UniqueName: \"kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775423 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775452 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.775497 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.781920 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v" (OuterVolumeSpecName: "kube-api-access-prf7v") pod "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" (UID: "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7"). InnerVolumeSpecName "kube-api-access-prf7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.814172 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" (UID: "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.814504 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data" (OuterVolumeSpecName: "config-data") pod "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" (UID: "5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.838378 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb56f1bb-3606-4cfc-b803-07e89f3dffaf" path="/var/lib/kubelet/pods/eb56f1bb-3606-4cfc-b803-07e89f3dffaf/volumes" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.878210 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.879371 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.879611 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.884735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.885053 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44s6g\" (UniqueName: \"kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.885539 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.885581 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prf7v\" (UniqueName: 
\"kubernetes.io/projected/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-kube-api-access-prf7v\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.885597 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.886073 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.886982 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.889130 5081 generic.go:334] "Generic (PLEG): container finished" podID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" containerID="15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084" exitCode=0 Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.889844 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.889869 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7","Type":"ContainerDied","Data":"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084"} Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.889904 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7","Type":"ContainerDied","Data":"9e4739ad83e5c2a9f7bfbbdeee69f72d06429845b3e32df69dda1e85ac37a6c1"} Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.889926 5081 scope.go:117] "RemoveContainer" containerID="15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.909649 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44s6g\" (UniqueName: \"kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g\") pod \"nova-cell1-cell-mapping-4dpj8\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.928682 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.942382 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.948204 5081 scope.go:117] "RemoveContainer" containerID="15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084" Oct 03 17:03:23 crc kubenswrapper[5081]: E1003 17:03:23.948747 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084\": container with ID starting with 
15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084 not found: ID does not exist" containerID="15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.948807 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084"} err="failed to get container status \"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084\": rpc error: code = NotFound desc = could not find container \"15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084\": container with ID starting with 15bba9f27fa7c1e80916c01494bb7947eb50714b119601b9005b8f61a8874084 not found: ID does not exist" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.954302 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:23 crc kubenswrapper[5081]: E1003 17:03:23.954789 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" containerName="nova-scheduler-scheduler" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.954810 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" containerName="nova-scheduler-scheduler" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.954997 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" containerName="nova-scheduler-scheduler" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.955675 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.961299 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.964936 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.987036 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2v4f\" (UniqueName: \"kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.987169 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:23 crc kubenswrapper[5081]: I1003 17:03:23.987224 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.079008 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.088944 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2v4f\" (UniqueName: \"kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.089094 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.089159 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.093719 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.097635 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.111802 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2v4f\" (UniqueName: \"kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f\") pod \"nova-scheduler-0\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.292287 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.531750 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4dpj8"] Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.715036 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:24 crc kubenswrapper[5081]: W1003 17:03:24.718869 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod049fc4a5_eb8b_4758_9c47_fc8f7910ca13.slice/crio-553eb462bcb4261d14bba85640ce6c762e647a431cb2d31351ef8a080310b5f7 WatchSource:0}: Error finding container 553eb462bcb4261d14bba85640ce6c762e647a431cb2d31351ef8a080310b5f7: Status 404 returned error can't find the container with id 553eb462bcb4261d14bba85640ce6c762e647a431cb2d31351ef8a080310b5f7 Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.899155 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"049fc4a5-eb8b-4758-9c47-fc8f7910ca13","Type":"ContainerStarted","Data":"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8"} Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.899495 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"049fc4a5-eb8b-4758-9c47-fc8f7910ca13","Type":"ContainerStarted","Data":"553eb462bcb4261d14bba85640ce6c762e647a431cb2d31351ef8a080310b5f7"} Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.901856 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4dpj8" event={"ID":"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c","Type":"ContainerStarted","Data":"15853a92d96f672769e2201a37ae361e4f8365b01ec8448a672e59bb4e7f2383"} Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.901899 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4dpj8" event={"ID":"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c","Type":"ContainerStarted","Data":"75026ec7e7f576e8c5f7995d883239e89845f130dd35aa6e7029409b81af1554"} Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.917533 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.917511015 podStartE2EDuration="1.917511015s" podCreationTimestamp="2025-10-03 17:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:24.915451756 +0000 UTC m=+5723.881008389" watchObservedRunningTime="2025-10-03 17:03:24.917511015 +0000 UTC m=+5723.883067628" Oct 03 17:03:24 crc kubenswrapper[5081]: I1003 17:03:24.932744 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-4dpj8" podStartSLOduration=1.932724893 podStartE2EDuration="1.932724893s" podCreationTimestamp="2025-10-03 17:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:24.928498871 +0000 UTC m=+5723.894055484" watchObservedRunningTime="2025-10-03 17:03:24.932724893 +0000 UTC m=+5723.898281506" Oct 03 17:03:25 crc kubenswrapper[5081]: I1003 17:03:25.847815 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7" path="/var/lib/kubelet/pods/5bbe5cc8-1f85-4de2-b3eb-31d203f6c1d7/volumes" Oct 03 17:03:26 crc 
kubenswrapper[5081]: I1003 17:03:26.220327 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 17:03:26 crc kubenswrapper[5081]: I1003 17:03:26.220714 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 17:03:29 crc kubenswrapper[5081]: I1003 17:03:29.293181 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 17:03:29 crc kubenswrapper[5081]: I1003 17:03:29.948390 5081 generic.go:334] "Generic (PLEG): container finished" podID="72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" containerID="15853a92d96f672769e2201a37ae361e4f8365b01ec8448a672e59bb4e7f2383" exitCode=0 Oct 03 17:03:29 crc kubenswrapper[5081]: I1003 17:03:29.948440 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4dpj8" event={"ID":"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c","Type":"ContainerDied","Data":"15853a92d96f672769e2201a37ae361e4f8365b01ec8448a672e59bb4e7f2383"} Oct 03 17:03:30 crc kubenswrapper[5081]: I1003 17:03:30.218095 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 17:03:30 crc kubenswrapper[5081]: I1003 17:03:30.218218 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 17:03:30 crc kubenswrapper[5081]: I1003 17:03:30.827831 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:03:30 crc kubenswrapper[5081]: E1003 17:03:30.828403 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.219948 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.219999 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.273844 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.301782 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.68:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.302012 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.68:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.332251 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44s6g\" (UniqueName: \"kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g\") pod \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.332637 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data\") pod \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.332666 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle\") pod \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.332760 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts\") pod \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\" (UID: \"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c\") " Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.341107 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts" (OuterVolumeSpecName: "scripts") pod "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" (UID: "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.341365 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g" (OuterVolumeSpecName: "kube-api-access-44s6g") pod "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" (UID: "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c"). InnerVolumeSpecName "kube-api-access-44s6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.360856 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data" (OuterVolumeSpecName: "config-data") pod "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" (UID: "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.361652 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" (UID: "72ebfdb3-5562-4e4b-9b3e-aa3007ae592c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.435146 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.435188 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.435201 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.435211 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44s6g\" (UniqueName: \"kubernetes.io/projected/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c-kube-api-access-44s6g\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.969006 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4dpj8" event={"ID":"72ebfdb3-5562-4e4b-9b3e-aa3007ae592c","Type":"ContainerDied","Data":"75026ec7e7f576e8c5f7995d883239e89845f130dd35aa6e7029409b81af1554"} Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.969046 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75026ec7e7f576e8c5f7995d883239e89845f130dd35aa6e7029409b81af1554" Oct 03 17:03:31 crc kubenswrapper[5081]: I1003 17:03:31.969075 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4dpj8" Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.142384 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.142757 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-log" containerID="cri-o://5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde" gracePeriod=30 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.142919 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-api" containerID="cri-o://2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9" gracePeriod=30 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.162916 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.163131 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" containerName="nova-scheduler-scheduler" containerID="cri-o://eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8" gracePeriod=30 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.239986 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.240246 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-log" containerID="cri-o://54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256" gracePeriod=30 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.240308 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-metadata" containerID="cri-o://f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c" gracePeriod=30 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.244626 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.69:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.244767 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.69:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.979149 5081 generic.go:334] "Generic (PLEG): container finished" podID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerID="54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256" exitCode=143 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.979228 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerDied","Data":"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256"} Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.982245 5081 generic.go:334] "Generic (PLEG): container finished" podID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerID="5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde" exitCode=143 Oct 03 17:03:32 crc kubenswrapper[5081]: I1003 17:03:32.982295 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerDied","Data":"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde"} Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.400328 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.535953 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2v4f\" (UniqueName: \"kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f\") pod \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.536595 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data\") pod \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.536684 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle\") pod \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\" (UID: \"049fc4a5-eb8b-4758-9c47-fc8f7910ca13\") " Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.541061 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f" (OuterVolumeSpecName: "kube-api-access-h2v4f") pod "049fc4a5-eb8b-4758-9c47-fc8f7910ca13" (UID: "049fc4a5-eb8b-4758-9c47-fc8f7910ca13"). InnerVolumeSpecName "kube-api-access-h2v4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.561856 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data" (OuterVolumeSpecName: "config-data") pod "049fc4a5-eb8b-4758-9c47-fc8f7910ca13" (UID: "049fc4a5-eb8b-4758-9c47-fc8f7910ca13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.563920 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "049fc4a5-eb8b-4758-9c47-fc8f7910ca13" (UID: "049fc4a5-eb8b-4758-9c47-fc8f7910ca13"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.638414 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2v4f\" (UniqueName: \"kubernetes.io/projected/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-kube-api-access-h2v4f\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.638450 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.638459 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049fc4a5-eb8b-4758-9c47-fc8f7910ca13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.941746 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:36 crc kubenswrapper[5081]: I1003 17:03:36.989497 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.018922 5081 generic.go:334] "Generic (PLEG): container finished" podID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerID="f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c" exitCode=0 Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.018995 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.019017 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerDied","Data":"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.019044 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae9100b1-1555-4822-90c4-c05bc6cf97b0","Type":"ContainerDied","Data":"15343f98322a184700ed1c748fffd175460ee4d851426e086ed8fd6d722ab1f1"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.019064 5081 scope.go:117] "RemoveContainer" containerID="f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.020526 5081 generic.go:334] "Generic (PLEG): container finished" podID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" containerID="eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8" exitCode=0 Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.020593 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"049fc4a5-eb8b-4758-9c47-fc8f7910ca13","Type":"ContainerDied","Data":"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.020615 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"049fc4a5-eb8b-4758-9c47-fc8f7910ca13","Type":"ContainerDied","Data":"553eb462bcb4261d14bba85640ce6c762e647a431cb2d31351ef8a080310b5f7"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.020638 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.024493 5081 generic.go:334] "Generic (PLEG): container finished" podID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerID="2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9" exitCode=0 Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.024542 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerDied","Data":"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.024605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6","Type":"ContainerDied","Data":"1a1d5ee6eee2763e90573be3f76faba6fb99137b9d123a45a69c07c90ead2d17"} Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.024611 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.057538 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data\") pod \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.057834 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs\") pod \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.058721 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle\") pod \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.058789 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n44gb\" (UniqueName: \"kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb\") pod \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.058814 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data\") pod \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.058971 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle\") pod \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.058996 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs\") pod \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\" (UID: \"3d707dea-6e10-4f0c-a4f0-4cec8ed648c6\") " Oct 03 17:03:37 crc 
kubenswrapper[5081]: I1003 17:03:37.059037 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5ndx\" (UniqueName: \"kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx\") pod \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\" (UID: \"ae9100b1-1555-4822-90c4-c05bc6cf97b0\") " Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.060325 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs" (OuterVolumeSpecName: "logs") pod "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" (UID: "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.060488 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs" (OuterVolumeSpecName: "logs") pod "ae9100b1-1555-4822-90c4-c05bc6cf97b0" (UID: "ae9100b1-1555-4822-90c4-c05bc6cf97b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.061628 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.061648 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9100b1-1555-4822-90c4-c05bc6cf97b0-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.063356 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx" (OuterVolumeSpecName: "kube-api-access-h5ndx") pod "ae9100b1-1555-4822-90c4-c05bc6cf97b0" (UID: "ae9100b1-1555-4822-90c4-c05bc6cf97b0"). InnerVolumeSpecName "kube-api-access-h5ndx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.070136 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb" (OuterVolumeSpecName: "kube-api-access-n44gb") pod "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" (UID: "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6"). InnerVolumeSpecName "kube-api-access-n44gb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.076108 5081 scope.go:117] "RemoveContainer" containerID="54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.090416 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae9100b1-1555-4822-90c4-c05bc6cf97b0" (UID: "ae9100b1-1555-4822-90c4-c05bc6cf97b0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.091040 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data" (OuterVolumeSpecName: "config-data") pod "ae9100b1-1555-4822-90c4-c05bc6cf97b0" (UID: "ae9100b1-1555-4822-90c4-c05bc6cf97b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.091820 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.101221 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.104723 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data" (OuterVolumeSpecName: "config-data") pod "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" (UID: "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.106270 5081 scope.go:117] "RemoveContainer" containerID="f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.106728 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c\": container with ID starting with f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c not found: ID does not exist" containerID="f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.106776 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c"} err="failed to get container status \"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c\": rpc error: code = NotFound desc = could not find container \"f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c\": container with ID starting with f8692642d297a14f4ffdaed56865471eb12561a42d16429bf1ee17fad7d9e48c not found: ID does not exist" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.106814 5081 scope.go:117] "RemoveContainer" containerID="54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.107003 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" (UID: "3d707dea-6e10-4f0c-a4f0-4cec8ed648c6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.107266 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256\": container with ID starting with 54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256 not found: ID does not exist" containerID="54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.107302 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256"} err="failed to get container status \"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256\": rpc error: code = NotFound desc = could not find container \"54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256\": container with ID starting with 54b598a6b8ffc7b50951610c963e6253352800456348216b7f6791cb13b83256 not found: ID does not exist" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.107324 5081 scope.go:117] "RemoveContainer" containerID="eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.109508 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110035 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-log" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110054 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-log" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110076 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-metadata" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110083 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-metadata" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110096 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-api" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110104 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-api" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110114 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-log" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110120 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-log" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110140 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" containerName="nova-manage" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110147 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" containerName="nova-manage" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.110159 5081 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" containerName="nova-scheduler-scheduler" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110166 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" containerName="nova-scheduler-scheduler" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110380 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" containerName="nova-manage" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110402 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" containerName="nova-scheduler-scheduler" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110418 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-log" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110431 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-log" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110456 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" containerName="nova-api-api" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.110466 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" containerName="nova-metadata-metadata" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.111467 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.117490 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.119067 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.124946 5081 scope.go:117] "RemoveContainer" containerID="eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.126255 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8\": container with ID starting with eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8 not found: ID does not exist" containerID="eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.126294 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8"} err="failed to get container status \"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8\": rpc error: code = NotFound desc = could not find container \"eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8\": container with ID starting with eefa5cab01de0948ab30cee553aaf484973c3e80fcecc5bc6ae1545dea34b0f8 not found: ID does not exist" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.126321 5081 scope.go:117] "RemoveContainer" containerID="2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.162893 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.162933 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163030 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk7d5\" (UniqueName: \"kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163166 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163180 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163191 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n44gb\" (UniqueName: \"kubernetes.io/projected/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-kube-api-access-n44gb\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163200 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae9100b1-1555-4822-90c4-c05bc6cf97b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163210 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.163219 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5ndx\" (UniqueName: \"kubernetes.io/projected/ae9100b1-1555-4822-90c4-c05bc6cf97b0-kube-api-access-h5ndx\") on node \"crc\" DevicePath \"\"" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.199492 5081 scope.go:117] "RemoveContainer" containerID="5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.217237 5081 scope.go:117] "RemoveContainer" containerID="2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.217739 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9\": container with ID starting with 2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9 not found: ID does not exist" containerID="2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9" Oct 03 17:03:37 crc 
kubenswrapper[5081]: I1003 17:03:37.217766 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9"} err="failed to get container status \"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9\": rpc error: code = NotFound desc = could not find container \"2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9\": container with ID starting with 2d76103e53f1fa7026642d2fedff694f0a298023f8ca78327216c356dac9a8d9 not found: ID does not exist" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.217789 5081 scope.go:117] "RemoveContainer" containerID="5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde" Oct 03 17:03:37 crc kubenswrapper[5081]: E1003 17:03:37.218081 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde\": container with ID starting with 5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde not found: ID does not exist" containerID="5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.218116 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde"} err="failed to get container status \"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde\": rpc error: code = NotFound desc = could not find container \"5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde\": container with ID starting with 5908b9975b6ea36855f6691af2ab593c09a1aa2c0d38c44a6d8708dcdcb5bfde not found: ID does not exist" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.264765 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk7d5\" (UniqueName: \"kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.264857 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.264878 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.269625 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.269654 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.280535 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk7d5\" (UniqueName: \"kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5\") pod \"nova-scheduler-0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.370638 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.396731 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.424186 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.432573 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.443702 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.445225 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.447122 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.453231 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.455074 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.456921 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.464649 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.468979 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkstd\" (UniqueName: \"kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469014 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469045 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469086 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469137 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvfpw\" (UniqueName: \"kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469182 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.469248 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.474808 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:37 crc kubenswrapper[5081]: 
I1003 17:03:37.496529 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.570748 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.570799 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.570945 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkstd\" (UniqueName: \"kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.570968 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.571004 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.571060 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.571130 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvfpw\" (UniqueName: \"kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.571228 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.575273 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.576021 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.576156 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.578956 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.580146 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.582104 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.592311 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvfpw\" (UniqueName: \"kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw\") pod \"nova-metadata-0\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") " pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.592345 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkstd\" (UniqueName: \"kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd\") pod \"nova-api-0\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.769426 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.779280 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.843028 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049fc4a5-eb8b-4758-9c47-fc8f7910ca13" path="/var/lib/kubelet/pods/049fc4a5-eb8b-4758-9c47-fc8f7910ca13/volumes" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.843967 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d707dea-6e10-4f0c-a4f0-4cec8ed648c6" path="/var/lib/kubelet/pods/3d707dea-6e10-4f0c-a4f0-4cec8ed648c6/volumes" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.844744 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae9100b1-1555-4822-90c4-c05bc6cf97b0" path="/var/lib/kubelet/pods/ae9100b1-1555-4822-90c4-c05bc6cf97b0/volumes" Oct 03 17:03:37 crc kubenswrapper[5081]: I1003 17:03:37.999663 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:03:38 crc kubenswrapper[5081]: I1003 17:03:38.052845 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c70c657d-63c0-4be3-93b7-73288e10c7d0","Type":"ContainerStarted","Data":"dbde67b69be68805a404038722f726e83e60bb5723c10d2b8ddd085e65649695"} Oct 03 17:03:38 crc kubenswrapper[5081]: I1003 17:03:38.124009 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:03:38 crc kubenswrapper[5081]: W1003 17:03:38.128619 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod806a4a4b_aad6_44b5_b2f2_ae85a3be9fdc.slice/crio-25227166d9d19f3a2765f72cf17b67bb4a0217739bc7e2e285dae49467624d59 WatchSource:0}: Error finding container 25227166d9d19f3a2765f72cf17b67bb4a0217739bc7e2e285dae49467624d59: Status 404 returned error can't find the container with id 25227166d9d19f3a2765f72cf17b67bb4a0217739bc7e2e285dae49467624d59 Oct 03 17:03:38 crc kubenswrapper[5081]: I1003 17:03:38.377783 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:03:38 crc kubenswrapper[5081]: W1003 17:03:38.387253 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2d2a3c2_3c46_49e7_9a38_cf4f42d910af.slice/crio-23516738372c1de853ebe3a3e9d9135fe5ffe7d90c3387c0b1f0c9e495b689cc WatchSource:0}: Error finding container 23516738372c1de853ebe3a3e9d9135fe5ffe7d90c3387c0b1f0c9e495b689cc: Status 404 returned error can't find the container with id 23516738372c1de853ebe3a3e9d9135fe5ffe7d90c3387c0b1f0c9e495b689cc Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.073295 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerStarted","Data":"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4"} Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.073344 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerStarted","Data":"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446"} Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.073355 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerStarted","Data":"23516738372c1de853ebe3a3e9d9135fe5ffe7d90c3387c0b1f0c9e495b689cc"} Oct 03 
17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.075478 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c70c657d-63c0-4be3-93b7-73288e10c7d0","Type":"ContainerStarted","Data":"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b"}
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.078679 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerStarted","Data":"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de"}
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.078714 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerStarted","Data":"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"}
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.078730 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerStarted","Data":"25227166d9d19f3a2765f72cf17b67bb4a0217739bc7e2e285dae49467624d59"}
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.096978 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.096954355 podStartE2EDuration="2.096954355s" podCreationTimestamp="2025-10-03 17:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:39.093404323 +0000 UTC m=+5738.058960936" watchObservedRunningTime="2025-10-03 17:03:39.096954355 +0000 UTC m=+5738.062510978"
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.129272 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.129254115 podStartE2EDuration="2.129254115s" podCreationTimestamp="2025-10-03 17:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:39.126043132 +0000 UTC m=+5738.091599745" watchObservedRunningTime="2025-10-03 17:03:39.129254115 +0000 UTC m=+5738.094810728"
Oct 03 17:03:39 crc kubenswrapper[5081]: I1003 17:03:39.148169 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.148150818 podStartE2EDuration="2.148150818s" podCreationTimestamp="2025-10-03 17:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:03:39.140260241 +0000 UTC m=+5738.105816854" watchObservedRunningTime="2025-10-03 17:03:39.148150818 +0000 UTC m=+5738.113707431"
Oct 03 17:03:41 crc kubenswrapper[5081]: I1003 17:03:41.835220 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"
Oct 03 17:03:41 crc kubenswrapper[5081]: E1003 17:03:41.835792 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
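
The three "Observed pod startup duration" records above are flat klog key=value lines, so the per-pod startup SLO figures can be pulled out mechanically. A minimal Python sketch, assuming only the pod="..." and podStartSLOduration=... fields visible in those records (the regexes, the startup_durations name, and the expected output are illustrative, not part of this log):

    import re

    # Match the quoted pod name and the unquoted SLO duration (seconds)
    # exactly as they appear in "Observed pod startup duration" records.
    POD_RE = re.compile(r'pod="([^"]+)"')
    SLO_RE = re.compile(r'podStartSLOduration=([0-9.]+)')

    def startup_durations(lines):
        """Yield (pod, seconds) for each pod_startup_latency_tracker record."""
        for line in lines:
            if "Observed pod startup duration" not in line:
                continue
            pod, slo = POD_RE.search(line), SLO_RE.search(line)
            if pod and slo:
                yield pod.group(1), float(slo.group(1))

    # Against the records above this would yield roughly:
    #   ("openstack/nova-metadata-0",  2.096954355)
    #   ("openstack/nova-scheduler-0", 2.129254115)
    #   ("openstack/nova-api-0",       2.148150818)
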
Oct 03 17:03:42 crc kubenswrapper[5081]: I1003 17:03:42.496662 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 03 17:03:42 crc kubenswrapper[5081]: I1003 17:03:42.769952 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 17:03:42 crc kubenswrapper[5081]: I1003 17:03:42.770024 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.496951 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.522094 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.769705 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.769756 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.780002 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 17:03:47 crc kubenswrapper[5081]: I1003 17:03:47.780269 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 17:03:48 crc kubenswrapper[5081]: I1003 17:03:48.180188 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 03 17:03:48 crc kubenswrapper[5081]: I1003 17:03:48.934762 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 17:03:48 crc kubenswrapper[5081]: I1003 17:03:48.934811 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 17:03:48 crc kubenswrapper[5081]: I1003 17:03:48.934810 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 17:03:48 crc kubenswrapper[5081]: I1003 17:03:48.934762 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 17:03:52 crc kubenswrapper[5081]: I1003 17:03:52.827480 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"
Oct 03 17:03:52 crc kubenswrapper[5081]: E1003 17:03:52.828246 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
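
The repeated "back-off 5m0s restarting failed" records above (17:03:41 and again at 17:03:52) show machine-config-daemon already pinned at kubelet's maximum CrashLoopBackOff delay. For reference, kubelet's default schedule doubles a 10s base delay up to that 5m cap; a small sketch under that assumption (the constants are generally documented kubelet defaults, not values recorded anywhere in this log):

    # Kubelet CrashLoopBackOff (assumed defaults): 10s base, doubled per restart, capped at 5m.
    BASE_S, CAP_S = 10, 300

    def backoff_schedule(restarts):
        """Delay in seconds applied before each of the first `restarts` restart attempts."""
        return [min(BASE_S * 2 ** i, CAP_S) for i in range(restarts)]

    print(backoff_schedule(7))  # [10, 20, 40, 80, 160, 300, 300] -- "back-off 5m0s" is the cap
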
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.775931 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.777097 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.778934 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.779824 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.784087 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.785635 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.789022 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 03 17:03:57 crc kubenswrapper[5081]: I1003 17:03:57.789165 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.235244 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.237893 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.383576 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"]
Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.385354 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.418706 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"] Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.462400 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.462479 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.462501 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.462532 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sctxw\" (UniqueName: \"kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.462555 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.563852 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.563956 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.563985 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.564036 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-sctxw\" (UniqueName: \"kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.564070 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.565067 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.565094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.565421 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.565707 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.585040 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sctxw\" (UniqueName: \"kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw\") pod \"dnsmasq-dns-5855f8f99f-vpxd2\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") " pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:58 crc kubenswrapper[5081]: I1003 17:03:58.709959 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:03:59 crc kubenswrapper[5081]: W1003 17:03:59.181160 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5532c04d_b48c_4db2_932c_c8036973e596.slice/crio-bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3 WatchSource:0}: Error finding container bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3: Status 404 returned error can't find the container with id bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3 Oct 03 17:03:59 crc kubenswrapper[5081]: I1003 17:03:59.183921 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"] Oct 03 17:03:59 crc kubenswrapper[5081]: I1003 17:03:59.244475 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" event={"ID":"5532c04d-b48c-4db2-932c-c8036973e596","Type":"ContainerStarted","Data":"bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3"} Oct 03 17:04:00 crc kubenswrapper[5081]: I1003 17:04:00.255932 5081 generic.go:334] "Generic (PLEG): container finished" podID="5532c04d-b48c-4db2-932c-c8036973e596" containerID="107bb531a4cf8ac5743763378b7172869d708b68d103ab81d161b70d6abcec8b" exitCode=0 Oct 03 17:04:00 crc kubenswrapper[5081]: I1003 17:04:00.256098 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" event={"ID":"5532c04d-b48c-4db2-932c-c8036973e596","Type":"ContainerDied","Data":"107bb531a4cf8ac5743763378b7172869d708b68d103ab81d161b70d6abcec8b"} Oct 03 17:04:01 crc kubenswrapper[5081]: I1003 17:04:01.266284 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" event={"ID":"5532c04d-b48c-4db2-932c-c8036973e596","Type":"ContainerStarted","Data":"536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1"} Oct 03 17:04:01 crc kubenswrapper[5081]: I1003 17:04:01.266753 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:04:01 crc kubenswrapper[5081]: I1003 17:04:01.288368 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" podStartSLOduration=3.288232256 podStartE2EDuration="3.288232256s" podCreationTimestamp="2025-10-03 17:03:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:01.281529964 +0000 UTC m=+5760.247086577" watchObservedRunningTime="2025-10-03 17:04:01.288232256 +0000 UTC m=+5760.253788879" Oct 03 17:04:04 crc kubenswrapper[5081]: I1003 17:04:04.829351 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:04:04 crc kubenswrapper[5081]: E1003 17:04:04.830184 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:04:08 crc kubenswrapper[5081]: I1003 17:04:08.712808 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" Oct 03 17:04:08 crc kubenswrapper[5081]: I1003 17:04:08.776402 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:04:08 crc kubenswrapper[5081]: I1003 17:04:08.776641 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="dnsmasq-dns" containerID="cri-o://df65aa4156924ed08fb35d83c4356c742dfacd73f8d051635bc6a395ddb26fbd" gracePeriod=10 Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.352081 5081 generic.go:334] "Generic (PLEG): container finished" podID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerID="df65aa4156924ed08fb35d83c4356c742dfacd73f8d051635bc6a395ddb26fbd" exitCode=0 Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.352510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" event={"ID":"ab9407e5-195d-4ddb-ae06-562042ba3db9","Type":"ContainerDied","Data":"df65aa4156924ed08fb35d83c4356c742dfacd73f8d051635bc6a395ddb26fbd"} Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.520174 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.567408 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb\") pod \"ab9407e5-195d-4ddb-ae06-562042ba3db9\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.567493 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qj2t\" (UniqueName: \"kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t\") pod \"ab9407e5-195d-4ddb-ae06-562042ba3db9\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.567548 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb\") pod \"ab9407e5-195d-4ddb-ae06-562042ba3db9\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.567653 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc\") pod \"ab9407e5-195d-4ddb-ae06-562042ba3db9\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.567742 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config\") pod \"ab9407e5-195d-4ddb-ae06-562042ba3db9\" (UID: \"ab9407e5-195d-4ddb-ae06-562042ba3db9\") " Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.576523 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t" (OuterVolumeSpecName: "kube-api-access-7qj2t") pod "ab9407e5-195d-4ddb-ae06-562042ba3db9" (UID: "ab9407e5-195d-4ddb-ae06-562042ba3db9"). InnerVolumeSpecName "kube-api-access-7qj2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.626200 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab9407e5-195d-4ddb-ae06-562042ba3db9" (UID: "ab9407e5-195d-4ddb-ae06-562042ba3db9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.631375 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab9407e5-195d-4ddb-ae06-562042ba3db9" (UID: "ab9407e5-195d-4ddb-ae06-562042ba3db9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.644069 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab9407e5-195d-4ddb-ae06-562042ba3db9" (UID: "ab9407e5-195d-4ddb-ae06-562042ba3db9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.644825 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config" (OuterVolumeSpecName: "config") pod "ab9407e5-195d-4ddb-ae06-562042ba3db9" (UID: "ab9407e5-195d-4ddb-ae06-562042ba3db9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.670024 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qj2t\" (UniqueName: \"kubernetes.io/projected/ab9407e5-195d-4ddb-ae06-562042ba3db9-kube-api-access-7qj2t\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.670065 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.670089 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.670107 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:09 crc kubenswrapper[5081]: I1003 17:04:09.670119 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9407e5-195d-4ddb-ae06-562042ba3db9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.369113 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" event={"ID":"ab9407e5-195d-4ddb-ae06-562042ba3db9","Type":"ContainerDied","Data":"db6b2e013afd79c00ba4239646d083da40a897ebb1a22801b28d35eee40dd314"} Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.370261 5081 scope.go:117] "RemoveContainer" 
containerID="df65aa4156924ed08fb35d83c4356c742dfacd73f8d051635bc6a395ddb26fbd" Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.370388 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5695b4b4c5-xj44x" Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.400770 5081 scope.go:117] "RemoveContainer" containerID="745dc5a34c89a4f8129635bff13802ff9a323f772c53a1c3c46d070d22b6cdca" Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.404942 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:04:10 crc kubenswrapper[5081]: I1003 17:04:10.417330 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5695b4b4c5-xj44x"] Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.562869 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lhfpn"] Oct 03 17:04:11 crc kubenswrapper[5081]: E1003 17:04:11.563356 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="init" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.563371 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="init" Oct 03 17:04:11 crc kubenswrapper[5081]: E1003 17:04:11.563399 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="dnsmasq-dns" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.563409 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="dnsmasq-dns" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.563671 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" containerName="dnsmasq-dns" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.564471 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.576276 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lhfpn"] Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.605587 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4xh8\" (UniqueName: \"kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8\") pod \"cinder-db-create-lhfpn\" (UID: \"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d\") " pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.707524 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4xh8\" (UniqueName: \"kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8\") pod \"cinder-db-create-lhfpn\" (UID: \"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d\") " pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.724446 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4xh8\" (UniqueName: \"kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8\") pod \"cinder-db-create-lhfpn\" (UID: \"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d\") " pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.839333 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab9407e5-195d-4ddb-ae06-562042ba3db9" path="/var/lib/kubelet/pods/ab9407e5-195d-4ddb-ae06-562042ba3db9/volumes" Oct 03 17:04:11 crc kubenswrapper[5081]: I1003 17:04:11.890472 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:12 crc kubenswrapper[5081]: I1003 17:04:12.359211 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lhfpn"] Oct 03 17:04:12 crc kubenswrapper[5081]: I1003 17:04:12.388450 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhfpn" event={"ID":"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d","Type":"ContainerStarted","Data":"7426c2a3a15befdd0eda444fcf649a853ed4d4fb00b1645179bdd3c59a0545a1"} Oct 03 17:04:13 crc kubenswrapper[5081]: I1003 17:04:13.401355 5081 generic.go:334] "Generic (PLEG): container finished" podID="e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" containerID="3fec7416e4463a5308623f6c7c7357fd93a83a3e93fb8c7bd0aae18cf4ca503c" exitCode=0 Oct 03 17:04:13 crc kubenswrapper[5081]: I1003 17:04:13.401642 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhfpn" event={"ID":"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d","Type":"ContainerDied","Data":"3fec7416e4463a5308623f6c7c7357fd93a83a3e93fb8c7bd0aae18cf4ca503c"} Oct 03 17:04:14 crc kubenswrapper[5081]: I1003 17:04:14.733036 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:14 crc kubenswrapper[5081]: I1003 17:04:14.761050 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xh8\" (UniqueName: \"kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8\") pod \"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d\" (UID: \"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d\") " Oct 03 17:04:14 crc kubenswrapper[5081]: I1003 17:04:14.772328 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8" (OuterVolumeSpecName: "kube-api-access-w4xh8") pod "e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" (UID: "e3b37e5d-f3e7-4362-a5fe-84df1d174d7d"). InnerVolumeSpecName "kube-api-access-w4xh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:14 crc kubenswrapper[5081]: I1003 17:04:14.862849 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xh8\" (UniqueName: \"kubernetes.io/projected/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d-kube-api-access-w4xh8\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:15 crc kubenswrapper[5081]: I1003 17:04:15.419759 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lhfpn" event={"ID":"e3b37e5d-f3e7-4362-a5fe-84df1d174d7d","Type":"ContainerDied","Data":"7426c2a3a15befdd0eda444fcf649a853ed4d4fb00b1645179bdd3c59a0545a1"} Oct 03 17:04:15 crc kubenswrapper[5081]: I1003 17:04:15.420060 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7426c2a3a15befdd0eda444fcf649a853ed4d4fb00b1645179bdd3c59a0545a1" Oct 03 17:04:15 crc kubenswrapper[5081]: I1003 17:04:15.420110 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lhfpn" Oct 03 17:04:17 crc kubenswrapper[5081]: I1003 17:04:17.834894 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:04:17 crc kubenswrapper[5081]: E1003 17:04:17.839247 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.657210 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3601-account-create-xdcn7"] Oct 03 17:04:21 crc kubenswrapper[5081]: E1003 17:04:21.657882 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" containerName="mariadb-database-create" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.657893 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" containerName="mariadb-database-create" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.658079 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" containerName="mariadb-database-create" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.658740 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.661282 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.671646 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3601-account-create-xdcn7"] Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.785605 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvcjm\" (UniqueName: \"kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm\") pod \"cinder-3601-account-create-xdcn7\" (UID: \"78df042a-965a-4611-9d0a-194098083135\") " pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.887841 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvcjm\" (UniqueName: \"kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm\") pod \"cinder-3601-account-create-xdcn7\" (UID: \"78df042a-965a-4611-9d0a-194098083135\") " pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.909667 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvcjm\" (UniqueName: \"kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm\") pod \"cinder-3601-account-create-xdcn7\" (UID: \"78df042a-965a-4611-9d0a-194098083135\") " pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:21 crc kubenswrapper[5081]: I1003 17:04:21.984023 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:22 crc kubenswrapper[5081]: I1003 17:04:22.428349 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3601-account-create-xdcn7"] Oct 03 17:04:22 crc kubenswrapper[5081]: I1003 17:04:22.478198 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3601-account-create-xdcn7" event={"ID":"78df042a-965a-4611-9d0a-194098083135","Type":"ContainerStarted","Data":"594fee58224cfe524490e8bbdaaad9d7f9b577fa707aa497b7d1d487c896b6c6"} Oct 03 17:04:23 crc kubenswrapper[5081]: I1003 17:04:23.489260 5081 generic.go:334] "Generic (PLEG): container finished" podID="78df042a-965a-4611-9d0a-194098083135" containerID="db66595cf36bc07d831ff8f3010834a7f223de38fc613bd88ae3a25417991442" exitCode=0 Oct 03 17:04:23 crc kubenswrapper[5081]: I1003 17:04:23.489448 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3601-account-create-xdcn7" event={"ID":"78df042a-965a-4611-9d0a-194098083135","Type":"ContainerDied","Data":"db66595cf36bc07d831ff8f3010834a7f223de38fc613bd88ae3a25417991442"} Oct 03 17:04:24 crc kubenswrapper[5081]: I1003 17:04:24.822220 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:24 crc kubenswrapper[5081]: I1003 17:04:24.940968 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvcjm\" (UniqueName: \"kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm\") pod \"78df042a-965a-4611-9d0a-194098083135\" (UID: \"78df042a-965a-4611-9d0a-194098083135\") " Oct 03 17:04:24 crc kubenswrapper[5081]: I1003 17:04:24.946663 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm" (OuterVolumeSpecName: "kube-api-access-pvcjm") pod "78df042a-965a-4611-9d0a-194098083135" (UID: "78df042a-965a-4611-9d0a-194098083135"). InnerVolumeSpecName "kube-api-access-pvcjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:25 crc kubenswrapper[5081]: I1003 17:04:25.043705 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvcjm\" (UniqueName: \"kubernetes.io/projected/78df042a-965a-4611-9d0a-194098083135-kube-api-access-pvcjm\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:25 crc kubenswrapper[5081]: I1003 17:04:25.517028 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3601-account-create-xdcn7" event={"ID":"78df042a-965a-4611-9d0a-194098083135","Type":"ContainerDied","Data":"594fee58224cfe524490e8bbdaaad9d7f9b577fa707aa497b7d1d487c896b6c6"} Oct 03 17:04:25 crc kubenswrapper[5081]: I1003 17:04:25.517089 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="594fee58224cfe524490e8bbdaaad9d7f9b577fa707aa497b7d1d487c896b6c6" Oct 03 17:04:25 crc kubenswrapper[5081]: I1003 17:04:25.517212 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3601-account-create-xdcn7" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.828314 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-gvzpw"] Oct 03 17:04:26 crc kubenswrapper[5081]: E1003 17:04:26.829131 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78df042a-965a-4611-9d0a-194098083135" containerName="mariadb-account-create" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.829149 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="78df042a-965a-4611-9d0a-194098083135" containerName="mariadb-account-create" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.829373 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="78df042a-965a-4611-9d0a-194098083135" containerName="mariadb-account-create" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.830189 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.833393 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-x2cvb" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.834419 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.837836 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.840511 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-gvzpw"] Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.991095 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.991255 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.991377 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.991409 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcb7d\" (UniqueName: \"kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.992953 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:26 crc kubenswrapper[5081]: I1003 17:04:26.993334 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095411 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095486 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095525 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095592 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095641 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095664 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcb7d\" (UniqueName: \"kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.095805 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.101337 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.101851 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.106434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.108062 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " 
pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.113725 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcb7d\" (UniqueName: \"kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d\") pod \"cinder-db-sync-gvzpw\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.160006 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:27 crc kubenswrapper[5081]: I1003 17:04:27.621085 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-gvzpw"] Oct 03 17:04:28 crc kubenswrapper[5081]: I1003 17:04:28.551178 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-gvzpw" event={"ID":"196ad8b4-8438-4dd2-8062-cd48ce2c8fca","Type":"ContainerStarted","Data":"bd45a2291b6a982f361416a8fa9d4261fd2dcc21edc6720d2067252d1c2c45a8"} Oct 03 17:04:28 crc kubenswrapper[5081]: I1003 17:04:28.551794 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-gvzpw" event={"ID":"196ad8b4-8438-4dd2-8062-cd48ce2c8fca","Type":"ContainerStarted","Data":"0d8510a9e480562f5f4a5ea2056a8825719a7760940a30ee282581dab30b7de6"} Oct 03 17:04:28 crc kubenswrapper[5081]: I1003 17:04:28.575510 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-gvzpw" podStartSLOduration=2.575489243 podStartE2EDuration="2.575489243s" podCreationTimestamp="2025-10-03 17:04:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:28.565542017 +0000 UTC m=+5787.531098630" watchObservedRunningTime="2025-10-03 17:04:28.575489243 +0000 UTC m=+5787.541045876" Oct 03 17:04:31 crc kubenswrapper[5081]: I1003 17:04:31.601117 5081 generic.go:334] "Generic (PLEG): container finished" podID="196ad8b4-8438-4dd2-8062-cd48ce2c8fca" containerID="bd45a2291b6a982f361416a8fa9d4261fd2dcc21edc6720d2067252d1c2c45a8" exitCode=0 Oct 03 17:04:31 crc kubenswrapper[5081]: I1003 17:04:31.601207 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-gvzpw" event={"ID":"196ad8b4-8438-4dd2-8062-cd48ce2c8fca","Type":"ContainerDied","Data":"bd45a2291b6a982f361416a8fa9d4261fd2dcc21edc6720d2067252d1c2c45a8"} Oct 03 17:04:31 crc kubenswrapper[5081]: I1003 17:04:31.828905 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:04:31 crc kubenswrapper[5081]: E1003 17:04:31.829422 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.090244 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.221702 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.221813 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.221886 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.221988 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcb7d\" (UniqueName: \"kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.222116 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.222169 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data\") pod \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\" (UID: \"196ad8b4-8438-4dd2-8062-cd48ce2c8fca\") " Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.223067 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.229800 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.229840 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d" (OuterVolumeSpecName: "kube-api-access-vcb7d") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "kube-api-access-vcb7d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.229842 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts" (OuterVolumeSpecName: "scripts") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.251075 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.272060 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data" (OuterVolumeSpecName: "config-data") pod "196ad8b4-8438-4dd2-8062-cd48ce2c8fca" (UID: "196ad8b4-8438-4dd2-8062-cd48ce2c8fca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.324954 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.324983 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcb7d\" (UniqueName: \"kubernetes.io/projected/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-kube-api-access-vcb7d\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.324992 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.325000 5081 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.325011 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.325019 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/196ad8b4-8438-4dd2-8062-cd48ce2c8fca-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.637993 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-gvzpw" event={"ID":"196ad8b4-8438-4dd2-8062-cd48ce2c8fca","Type":"ContainerDied","Data":"0d8510a9e480562f5f4a5ea2056a8825719a7760940a30ee282581dab30b7de6"} Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.638368 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d8510a9e480562f5f4a5ea2056a8825719a7760940a30ee282581dab30b7de6" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.638057 5081 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-gvzpw" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.926819 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"] Oct 03 17:04:33 crc kubenswrapper[5081]: E1003 17:04:33.938401 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196ad8b4-8438-4dd2-8062-cd48ce2c8fca" containerName="cinder-db-sync" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.938568 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="196ad8b4-8438-4dd2-8062-cd48ce2c8fca" containerName="cinder-db-sync" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.941521 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="196ad8b4-8438-4dd2-8062-cd48ce2c8fca" containerName="cinder-db-sync" Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.942475 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"] Oct 03 17:04:33 crc kubenswrapper[5081]: I1003 17:04:33.942654 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.037678 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.037728 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8hjw\" (UniqueName: \"kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.037754 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.037775 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.037885 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.073860 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.084918 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.093506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.093531 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.093613 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.094236 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.094279 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-x2cvb" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.139469 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.139515 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8hjw\" (UniqueName: \"kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.139541 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.139576 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.139612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.140781 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.140948 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " 
pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.141363 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.141752 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.174153 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8hjw\" (UniqueName: \"kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw\") pod \"dnsmasq-dns-565544976c-mklxz\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") " pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241374 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241739 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241777 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6585t\" (UniqueName: \"kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241817 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241903 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241948 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.241968 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.268742 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344101 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344180 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344237 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344296 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344339 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344377 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6585t\" (UniqueName: \"kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344629 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.344714 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " 
pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.349796 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.350084 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.350460 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.352908 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.366052 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6585t\" (UniqueName: \"kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t\") pod \"cinder-api-0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.403091 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.715349 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:04:34 crc kubenswrapper[5081]: I1003 17:04:34.748716 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"] Oct 03 17:04:34 crc kubenswrapper[5081]: W1003 17:04:34.760933 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd9e84b4_3aa8_4013_b48d_90c1c0e04215.slice/crio-169c3c5944a67f6e7389ac56ae14c0d83b09905a7bc9f48aad853414d3667f77 WatchSource:0}: Error finding container 169c3c5944a67f6e7389ac56ae14c0d83b09905a7bc9f48aad853414d3667f77: Status 404 returned error can't find the container with id 169c3c5944a67f6e7389ac56ae14c0d83b09905a7bc9f48aad853414d3667f77 Oct 03 17:04:35 crc kubenswrapper[5081]: I1003 17:04:35.683987 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerID="8ccc246d5b64859981f87ddc8483cff5b5f42f51c32f6c3c37be32afc94fab73" exitCode=0 Oct 03 17:04:35 crc kubenswrapper[5081]: I1003 17:04:35.684218 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565544976c-mklxz" event={"ID":"fd9e84b4-3aa8-4013-b48d-90c1c0e04215","Type":"ContainerDied","Data":"8ccc246d5b64859981f87ddc8483cff5b5f42f51c32f6c3c37be32afc94fab73"} Oct 03 17:04:35 crc kubenswrapper[5081]: I1003 17:04:35.684639 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565544976c-mklxz" event={"ID":"fd9e84b4-3aa8-4013-b48d-90c1c0e04215","Type":"ContainerStarted","Data":"169c3c5944a67f6e7389ac56ae14c0d83b09905a7bc9f48aad853414d3667f77"} Oct 03 17:04:35 crc kubenswrapper[5081]: I1003 17:04:35.687251 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerStarted","Data":"2066507e2ae0783798c881ba72cd696bf15a000b07090a27f0c315686bfd2164"} Oct 03 17:04:35 crc kubenswrapper[5081]: I1003 17:04:35.687282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerStarted","Data":"ccff6b320aab3c7968ce924302edfbbf0d1e1f88862f677948e2563f2912a4e0"} Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.700303 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerStarted","Data":"079c5c0491bf28bfc03b44051a6094914207bbdff21a9fde74bd6104dee46096"} Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.700662 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.704082 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565544976c-mklxz" event={"ID":"fd9e84b4-3aa8-4013-b48d-90c1c0e04215","Type":"ContainerStarted","Data":"ebe8ae23fef3f4cfec9d42b20a833ced18e8c2ff9b316e59819248277886b5d2"} Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.704221 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-565544976c-mklxz" Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.720619 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.720595964 
Oct 03 17:04:36 crc kubenswrapper[5081]: I1003 17:04:36.743770 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-565544976c-mklxz" podStartSLOduration=3.7437528 podStartE2EDuration="3.7437528s" podCreationTimestamp="2025-10-03 17:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:36.74201229 +0000 UTC m=+5795.707568923" watchObservedRunningTime="2025-10-03 17:04:36.7437528 +0000 UTC m=+5795.709309413"
Oct 03 17:04:43 crc kubenswrapper[5081]: I1003 17:04:43.830758 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"
Oct 03 17:04:43 crc kubenswrapper[5081]: E1003 17:04:43.831431 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.270008 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-565544976c-mklxz"
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.326803 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"]
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.327024 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="dnsmasq-dns" containerID="cri-o://536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1" gracePeriod=10
Oct 03 17:04:44 crc kubenswrapper[5081]: E1003 17:04:44.463390 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5532c04d_b48c_4db2_932c_c8036973e596.slice/crio-536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5532c04d_b48c_4db2_932c_c8036973e596.slice/crio-conmon-536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1.scope\": RecentStats: unable to find data in memory cache]"
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.792928 5081 generic.go:334] "Generic (PLEG): container finished" podID="5532c04d-b48c-4db2-932c-c8036973e596" containerID="536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1" exitCode=0
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.793256 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" event={"ID":"5532c04d-b48c-4db2-932c-c8036973e596","Type":"ContainerDied","Data":"536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1"}
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.793328 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2" event={"ID":"5532c04d-b48c-4db2-932c-c8036973e596","Type":"ContainerDied","Data":"bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3"}
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.793343 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd208c305d2bd7f53d78ba687cedb347b111afecc8e491ca996c05f505a694c3"
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.821616 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2"
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.939507 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sctxw\" (UniqueName: \"kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw\") pod \"5532c04d-b48c-4db2-932c-c8036973e596\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") "
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.940978 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config\") pod \"5532c04d-b48c-4db2-932c-c8036973e596\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") "
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.941126 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb\") pod \"5532c04d-b48c-4db2-932c-c8036973e596\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") "
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.941295 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb\") pod \"5532c04d-b48c-4db2-932c-c8036973e596\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") "
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.941387 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc\") pod \"5532c04d-b48c-4db2-932c-c8036973e596\" (UID: \"5532c04d-b48c-4db2-932c-c8036973e596\") "
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.947812 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw" (OuterVolumeSpecName: "kube-api-access-sctxw") pod "5532c04d-b48c-4db2-932c-c8036973e596" (UID: "5532c04d-b48c-4db2-932c-c8036973e596"). InnerVolumeSpecName "kube-api-access-sctxw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.984812 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5532c04d-b48c-4db2-932c-c8036973e596" (UID: "5532c04d-b48c-4db2-932c-c8036973e596"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.987688 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5532c04d-b48c-4db2-932c-c8036973e596" (UID: "5532c04d-b48c-4db2-932c-c8036973e596"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:04:44 crc kubenswrapper[5081]: I1003 17:04:44.993372 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config" (OuterVolumeSpecName: "config") pod "5532c04d-b48c-4db2-932c-c8036973e596" (UID: "5532c04d-b48c-4db2-932c-c8036973e596"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.004694 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5532c04d-b48c-4db2-932c-c8036973e596" (UID: "5532c04d-b48c-4db2-932c-c8036973e596"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.044232 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sctxw\" (UniqueName: \"kubernetes.io/projected/5532c04d-b48c-4db2-932c-c8036973e596-kube-api-access-sctxw\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.044273 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-config\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.044292 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.044303 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.044314 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5532c04d-b48c-4db2-932c-c8036973e596-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.804267 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5855f8f99f-vpxd2"
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.842473 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"]
Oct 03 17:04:45 crc kubenswrapper[5081]: I1003 17:04:45.849583 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5855f8f99f-vpxd2"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.388899 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.672271 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.672928 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerName="nova-scheduler-scheduler" containerID="cri-o://11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.689396 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.689854 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="0d01708d-75cb-411e-95ac-d3458148063e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://5b412de9f22f243cc157c9c6bb27e1610b23d48c125e4ebd5c76d4cb4bd4fb96" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.706192 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.706492 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.713184 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.713375 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log" containerID="cri-o://e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.713482 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata" containerID="cri-o://27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.721381 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.721603 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-log" containerID="cri-o://ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23" gracePeriod=30
Oct 03 17:04:46 crc kubenswrapper[5081]: I1003 17:04:46.721738 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-api" containerID="cri-o://3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de" gracePeriod=30
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.465403 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 03 17:04:47 crc kubenswrapper[5081]: E1003 17:04:47.499666 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 17:04:47 crc kubenswrapper[5081]: E1003 17:04:47.503262 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 17:04:47 crc kubenswrapper[5081]: E1003 17:04:47.504764 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 17:04:47 crc kubenswrapper[5081]: E1003 17:04:47.504801 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerName="nova-scheduler-scheduler"
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.599972 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle\") pod \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") "
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.600293 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data\") pod \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") "
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.600335 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4brdb\" (UniqueName: \"kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb\") pod \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\" (UID: \"7c45a8eb-08c5-4a88-ae2f-835953e14a86\") "
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.606879 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb" (OuterVolumeSpecName: "kube-api-access-4brdb") pod "7c45a8eb-08c5-4a88-ae2f-835953e14a86" (UID: "7c45a8eb-08c5-4a88-ae2f-835953e14a86"). InnerVolumeSpecName "kube-api-access-4brdb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.638386 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c45a8eb-08c5-4a88-ae2f-835953e14a86" (UID: "7c45a8eb-08c5-4a88-ae2f-835953e14a86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.640655 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data" (OuterVolumeSpecName: "config-data") pod "7c45a8eb-08c5-4a88-ae2f-835953e14a86" (UID: "7c45a8eb-08c5-4a88-ae2f-835953e14a86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.702493 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.702531 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4brdb\" (UniqueName: \"kubernetes.io/projected/7c45a8eb-08c5-4a88-ae2f-835953e14a86-kube-api-access-4brdb\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.702542 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c45a8eb-08c5-4a88-ae2f-835953e14a86-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.839552 5081 generic.go:334] "Generic (PLEG): container finished" podID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" containerID="69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146" exitCode=0
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.839698 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.848302 5081 generic.go:334] "Generic (PLEG): container finished" podID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerID="ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23" exitCode=143
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.849696 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5532c04d-b48c-4db2-932c-c8036973e596" path="/var/lib/kubelet/pods/5532c04d-b48c-4db2-932c-c8036973e596/volumes"
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.850658 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7c45a8eb-08c5-4a88-ae2f-835953e14a86","Type":"ContainerDied","Data":"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"}
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.850687 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7c45a8eb-08c5-4a88-ae2f-835953e14a86","Type":"ContainerDied","Data":"4a141bf3c8a93264f993466718b9af4e104f9e441ae51c19eae431486ba3f5b9"}
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.850698 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerDied","Data":"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"}
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.850719 5081 scope.go:117] "RemoveContainer" containerID="69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.857324 5081 generic.go:334] "Generic (PLEG): container finished" podID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerID="e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446" exitCode=143
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.857363 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerDied","Data":"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446"}
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.881040 5081 scope.go:117] "RemoveContainer" containerID="69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"
Oct 03 17:04:47 crc kubenswrapper[5081]: E1003 17:04:47.881474 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146\": container with ID starting with 69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146 not found: ID does not exist" containerID="69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"
Oct 03 17:04:47 crc kubenswrapper[5081]: I1003 17:04:47.881508 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146"} err="failed to get container status \"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146\": rpc error: code = NotFound desc = could not find container \"69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146\": container with ID starting with 69fc9c6f1fae04041cfbf3984d900a5c1873496209224c03cc9022108d5f2146 not found: ID does not exist"
Oct 03 17:04:48 crc kubenswrapper[5081]: I1003 17:04:48.878059 5081 generic.go:334] "Generic (PLEG): container finished" podID="0d01708d-75cb-411e-95ac-d3458148063e" containerID="5b412de9f22f243cc157c9c6bb27e1610b23d48c125e4ebd5c76d4cb4bd4fb96" exitCode=0
Oct 03 17:04:48 crc kubenswrapper[5081]: I1003 17:04:48.878298 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0d01708d-75cb-411e-95ac-d3458148063e","Type":"ContainerDied","Data":"5b412de9f22f243cc157c9c6bb27e1610b23d48c125e4ebd5c76d4cb4bd4fb96"}
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.060684 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.130007 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data\") pod \"0d01708d-75cb-411e-95ac-d3458148063e\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") "
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.130262 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle\") pod \"0d01708d-75cb-411e-95ac-d3458148063e\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") "
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.130314 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm8bg\" (UniqueName: \"kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg\") pod \"0d01708d-75cb-411e-95ac-d3458148063e\" (UID: \"0d01708d-75cb-411e-95ac-d3458148063e\") "
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.136159 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg" (OuterVolumeSpecName: "kube-api-access-nm8bg") pod "0d01708d-75cb-411e-95ac-d3458148063e" (UID: "0d01708d-75cb-411e-95ac-d3458148063e"). InnerVolumeSpecName "kube-api-access-nm8bg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.160200 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data" (OuterVolumeSpecName: "config-data") pod "0d01708d-75cb-411e-95ac-d3458148063e" (UID: "0d01708d-75cb-411e-95ac-d3458148063e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.160921 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d01708d-75cb-411e-95ac-d3458148063e" (UID: "0d01708d-75cb-411e-95ac-d3458148063e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.232250 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm8bg\" (UniqueName: \"kubernetes.io/projected/0d01708d-75cb-411e-95ac-d3458148063e-kube-api-access-nm8bg\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.232479 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.232597 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d01708d-75cb-411e-95ac-d3458148063e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.852301 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": read tcp 10.217.0.2:45416->10.217.1.73:8775: read: connection reset by peer"
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.852387 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": read tcp 10.217.0.2:45428->10.217.1.73:8775: read: connection reset by peer"
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.916644 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0d01708d-75cb-411e-95ac-d3458148063e","Type":"ContainerDied","Data":"3752891d8a21c3515c1b4aea73a92f0cf750d6d3a930dd9e5d9c77d1f9bcab04"}
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.917656 5081 scope.go:117] "RemoveContainer" containerID="5b412de9f22f243cc157c9c6bb27e1610b23d48c125e4ebd5c76d4cb4bd4fb96"
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.917107 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.939037 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 03 17:04:49 crc kubenswrapper[5081]: I1003 17:04:49.939243 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerName="nova-cell1-conductor-conductor" containerID="cri-o://4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" gracePeriod=30
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.071620 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.106655 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.159819 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:50 crc kubenswrapper[5081]: E1003 17:04:50.160291 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d01708d-75cb-411e-95ac-d3458148063e" containerName="nova-cell0-conductor-conductor"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160309 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d01708d-75cb-411e-95ac-d3458148063e" containerName="nova-cell0-conductor-conductor"
Oct 03 17:04:50 crc kubenswrapper[5081]: E1003 17:04:50.160338 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="dnsmasq-dns"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160346 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="dnsmasq-dns"
Oct 03 17:04:50 crc kubenswrapper[5081]: E1003 17:04:50.160364 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" containerName="nova-cell1-novncproxy-novncproxy"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160395 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" containerName="nova-cell1-novncproxy-novncproxy"
Oct 03 17:04:50 crc kubenswrapper[5081]: E1003 17:04:50.160441 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="init"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160449 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="init"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160706 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5532c04d-b48c-4db2-932c-c8036973e596" containerName="dnsmasq-dns"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160731 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" containerName="nova-cell1-novncproxy-novncproxy"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.160742 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d01708d-75cb-411e-95ac-d3458148063e" containerName="nova-cell0-conductor-conductor"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.161553 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.165419 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.170095 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.261724 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ptm2\" (UniqueName: \"kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.261772 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.261814 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.363140 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.363840 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ptm2\" (UniqueName: \"kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.364038 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.370709 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.374546 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.385555 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ptm2\" (UniqueName: \"kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2\") pod \"nova-cell0-conductor-0\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.472327 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.479139 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.518377 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567212 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle\") pod \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567285 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkstd\" (UniqueName: \"kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd\") pod \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567359 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data\") pod \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567383 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvfpw\" (UniqueName: \"kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw\") pod \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567404 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle\") pod \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567527 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs\") pod \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567569 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs\") pod \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\" (UID: \"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af\") "
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.567998 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") pod \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") "
\"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") pod \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\" (UID: \"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc\") " Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.569911 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs" (OuterVolumeSpecName: "logs") pod "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" (UID: "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.570147 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs" (OuterVolumeSpecName: "logs") pod "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" (UID: "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.572396 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd" (OuterVolumeSpecName: "kube-api-access-jkstd") pod "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" (UID: "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc"). InnerVolumeSpecName "kube-api-access-jkstd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.576008 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw" (OuterVolumeSpecName: "kube-api-access-kvfpw") pod "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" (UID: "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af"). InnerVolumeSpecName "kube-api-access-kvfpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.602232 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data" (OuterVolumeSpecName: "config-data") pod "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" (UID: "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.603115 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" (UID: "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.605467 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data" (OuterVolumeSpecName: "config-data") pod "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" (UID: "c2d2a3c2-3c46-49e7-9a38-cf4f42d910af"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.623208 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" (UID: "806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672054 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672086 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkstd\" (UniqueName: \"kubernetes.io/projected/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-kube-api-access-jkstd\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672095 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672104 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvfpw\" (UniqueName: \"kubernetes.io/projected/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-kube-api-access-kvfpw\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672115 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672123 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672130 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.672139 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.929846 5081 generic.go:334] "Generic (PLEG): container finished" podID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerID="3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de" exitCode=0 Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.929907 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.929920 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerDied","Data":"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de"} Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.929953 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc","Type":"ContainerDied","Data":"25227166d9d19f3a2765f72cf17b67bb4a0217739bc7e2e285dae49467624d59"} Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.929973 5081 scope.go:117] "RemoveContainer" containerID="3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de" Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.933517 5081 generic.go:334] "Generic (PLEG): container finished" podID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerID="27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4" exitCode=0 Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.933587 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerDied","Data":"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4"} Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.933616 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d2a3c2-3c46-49e7-9a38-cf4f42d910af","Type":"ContainerDied","Data":"23516738372c1de853ebe3a3e9d9135fe5ffe7d90c3387c0b1f0c9e495b689cc"} Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.933672 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.966204 5081 scope.go:117] "RemoveContainer" containerID="ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"
Oct 03 17:04:50 crc kubenswrapper[5081]: I1003 17:04:50.975491 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.007867 5081 scope.go:117] "RemoveContainer" containerID="3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de"
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.008415 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de\": container with ID starting with 3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de not found: ID does not exist" containerID="3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.008461 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de"} err="failed to get container status \"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de\": rpc error: code = NotFound desc = could not find container \"3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de\": container with ID starting with 3cc1cd831bd614d66ac13c8476d039d1ed5aec69e24e693c32f3cb37ec8619de not found: ID does not exist"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.008491 5081 scope.go:117] "RemoveContainer" containerID="ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.010204 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23\": container with ID starting with ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23 not found: ID does not exist" containerID="ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.010259 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23"} err="failed to get container status \"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23\": rpc error: code = NotFound desc = could not find container \"ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23\": container with ID starting with ad5749f79bc345d207422de194e8c8b06982bf6dbfbb252f04e169980003eb23 not found: ID does not exist"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.010288 5081 scope.go:117] "RemoveContainer" containerID="27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.010618 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.062620 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.093227 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.105644 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.106164 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106181 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.106197 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106207 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.106228 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-api"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106235 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-api"
Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.106254 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106260 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106458 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106472 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-log"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106500 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" containerName="nova-api-api"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.106515 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" containerName="nova-metadata-metadata"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.107507 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.108211 5081 scope.go:117] "RemoveContainer" containerID="e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.117375 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.120723 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.122593 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.125250 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.132620 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.142742 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.153321 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.182890 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.182952 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183019 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183063 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183110 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183137 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2szx9\" (UniqueName: \"kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0"
Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.183250 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nhvn\" (UniqueName: \"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0"
\"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.231646 5081 scope.go:117] "RemoveContainer" containerID="27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4" Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.232419 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4\": container with ID starting with 27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4 not found: ID does not exist" containerID="27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.232446 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4"} err="failed to get container status \"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4\": rpc error: code = NotFound desc = could not find container \"27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4\": container with ID starting with 27dd72c8d483f5f35864dcb78ec1458882e216f04178a36a2029d62b96cadfa4 not found: ID does not exist" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.232467 5081 scope.go:117] "RemoveContainer" containerID="e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446" Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.232718 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446\": container with ID starting with e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446 not found: ID does not exist" containerID="e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.232737 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446"} err="failed to get container status \"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446\": rpc error: code = NotFound desc = could not find container \"e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446\": container with ID starting with e4dd6cb3154dd6dafab35a22afb6282305d5fad9418e42c91a7aff92afc49446 not found: ID does not exist" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.284496 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2szx9\" (UniqueName: \"kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.284886 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.284954 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nhvn\" 
(UniqueName: \"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.284999 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.285018 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.285054 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.285543 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.285586 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.285729 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.286188 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.292319 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.292351 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.293071 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.293257 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.303264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2szx9\" (UniqueName: \"kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9\") pod \"nova-api-0\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.303497 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nhvn\" (UniqueName: \"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") pod \"nova-metadata-0\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.532390 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.540324 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.863717 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d01708d-75cb-411e-95ac-d3458148063e" path="/var/lib/kubelet/pods/0d01708d-75cb-411e-95ac-d3458148063e/volumes" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.868989 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc" path="/var/lib/kubelet/pods/806a4a4b-aad6-44b5-b2f2-ae85a3be9fdc/volumes" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.870016 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d2a3c2-3c46-49e7-9a38-cf4f42d910af" path="/var/lib/kubelet/pods/c2d2a3c2-3c46-49e7-9a38-cf4f42d910af/volumes" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.901083 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.960487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21","Type":"ContainerStarted","Data":"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d"} Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.961319 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21","Type":"ContainerStarted","Data":"e256609c619a0ab89506e1efb2a741e9d1143c7c4a30036097ddcaf30d245567"} Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.961393 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.964549 5081 generic.go:334] "Generic (PLEG): container finished" podID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" exitCode=0 Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.964686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c70c657d-63c0-4be3-93b7-73288e10c7d0","Type":"ContainerDied","Data":"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b"} Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.964773 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c70c657d-63c0-4be3-93b7-73288e10c7d0","Type":"ContainerDied","Data":"dbde67b69be68805a404038722f726e83e60bb5723c10d2b8ddd085e65649695"} Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.964837 5081 scope.go:117] "RemoveContainer" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.964972 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.985797 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.9857757600000001 podStartE2EDuration="1.98577576s" podCreationTimestamp="2025-10-03 17:04:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:51.984883965 +0000 UTC m=+5810.950440578" watchObservedRunningTime="2025-10-03 17:04:51.98577576 +0000 UTC m=+5810.951332393" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.991034 5081 scope.go:117] "RemoveContainer" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" Oct 03 17:04:51 crc kubenswrapper[5081]: E1003 17:04:51.993013 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b\": container with ID starting with 11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b not found: ID does not exist" containerID="11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b" Oct 03 17:04:51 crc kubenswrapper[5081]: I1003 17:04:51.993052 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b"} err="failed to get container status \"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b\": rpc error: code = NotFound desc = could not find container \"11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b\": container with ID starting with 11edd49116d86a2e1a2ef87c6e3e377c9a53f32fae8d9bf95f8d54954fd3e24b not found: ID does not exist" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.000935 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk7d5\" (UniqueName: \"kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5\") pod \"c70c657d-63c0-4be3-93b7-73288e10c7d0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.000983 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data\") pod \"c70c657d-63c0-4be3-93b7-73288e10c7d0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.001128 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle\") pod \"c70c657d-63c0-4be3-93b7-73288e10c7d0\" (UID: \"c70c657d-63c0-4be3-93b7-73288e10c7d0\") " Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.007411 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5" (OuterVolumeSpecName: "kube-api-access-gk7d5") pod "c70c657d-63c0-4be3-93b7-73288e10c7d0" (UID: "c70c657d-63c0-4be3-93b7-73288e10c7d0"). InnerVolumeSpecName "kube-api-access-gk7d5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.027860 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data" (OuterVolumeSpecName: "config-data") pod "c70c657d-63c0-4be3-93b7-73288e10c7d0" (UID: "c70c657d-63c0-4be3-93b7-73288e10c7d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.034655 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c70c657d-63c0-4be3-93b7-73288e10c7d0" (UID: "c70c657d-63c0-4be3-93b7-73288e10c7d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.103542 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.105255 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk7d5\" (UniqueName: \"kubernetes.io/projected/c70c657d-63c0-4be3-93b7-73288e10c7d0-kube-api-access-gk7d5\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.105273 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c657d-63c0-4be3-93b7-73288e10c7d0-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.236409 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.392715 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.431618 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.444605 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.450662 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: E1003 17:04:52.451261 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerName="nova-scheduler-scheduler" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.451327 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerName="nova-scheduler-scheduler" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.451637 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" containerName="nova-scheduler-scheduler" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.454336 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.458068 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.458232 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.517264 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krp8f\" (UniqueName: \"kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.517346 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.517440 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.619146 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.619229 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krp8f\" (UniqueName: \"kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.619281 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.624424 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.625641 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.637734 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krp8f\" (UniqueName: 
\"kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f\") pod \"nova-scheduler-0\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.770399 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.981838 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerStarted","Data":"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734"} Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.982184 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerStarted","Data":"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a"} Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.982202 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerStarted","Data":"140d15a752a8c7d4fa9a9e908656d3a44fa66cbd45f23bfc126e7851dc028b9e"} Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.986496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerStarted","Data":"3983b21ea76676c2f8ce9fb09b8ed8a3748f827017101ce24cc74bd0154b826f"} Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.986539 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerStarted","Data":"8cce5a28488bbf244a3f741d2edcd6af96637dc4382513e573271312e771dce2"} Oct 03 17:04:52 crc kubenswrapper[5081]: I1003 17:04:52.986550 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerStarted","Data":"b7a6aac07d49018b7792034215adaf124e6f0393687ec3c83a709dd3239ecd06"} Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.009296 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.009277183 podStartE2EDuration="3.009277183s" podCreationTimestamp="2025-10-03 17:04:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:53.002852778 +0000 UTC m=+5811.968409391" watchObservedRunningTime="2025-10-03 17:04:53.009277183 +0000 UTC m=+5811.974833796" Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.032057 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.032043298 podStartE2EDuration="3.032043298s" podCreationTimestamp="2025-10-03 17:04:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:53.028685541 +0000 UTC m=+5811.994242154" watchObservedRunningTime="2025-10-03 17:04:53.032043298 +0000 UTC m=+5811.997599911" Oct 03 17:04:53 crc kubenswrapper[5081]: E1003 17:04:53.203128 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:04:53 crc kubenswrapper[5081]: E1003 17:04:53.204730 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:04:53 crc kubenswrapper[5081]: E1003 17:04:53.206081 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:04:53 crc kubenswrapper[5081]: E1003 17:04:53.206152 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerName="nova-cell1-conductor-conductor" Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.239238 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:04:53 crc kubenswrapper[5081]: W1003 17:04:53.252955 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda03a230_eac7_4927_a20f_c680a7647aa3.slice/crio-096c7f07c645429dcb5ba3253c2486b853f4a79e6195f1fbdd61deecd08fc78e WatchSource:0}: Error finding container 096c7f07c645429dcb5ba3253c2486b853f4a79e6195f1fbdd61deecd08fc78e: Status 404 returned error can't find the container with id 096c7f07c645429dcb5ba3253c2486b853f4a79e6195f1fbdd61deecd08fc78e Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.843431 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c70c657d-63c0-4be3-93b7-73288e10c7d0" path="/var/lib/kubelet/pods/c70c657d-63c0-4be3-93b7-73288e10c7d0/volumes" Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.996644 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da03a230-eac7-4927-a20f-c680a7647aa3","Type":"ContainerStarted","Data":"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7"} Oct 03 17:04:53 crc kubenswrapper[5081]: I1003 17:04:53.996694 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da03a230-eac7-4927-a20f-c680a7647aa3","Type":"ContainerStarted","Data":"096c7f07c645429dcb5ba3253c2486b853f4a79e6195f1fbdd61deecd08fc78e"} Oct 03 17:04:54 crc kubenswrapper[5081]: I1003 17:04:54.019591 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.019575096 podStartE2EDuration="2.019575096s" podCreationTimestamp="2025-10-03 17:04:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:54.017831436 +0000 UTC m=+5812.983388059" watchObservedRunningTime="2025-10-03 17:04:54.019575096 +0000 UTC m=+5812.985131709" Oct 03 17:04:56 crc kubenswrapper[5081]: I1003 17:04:56.540865 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-metadata-0" Oct 03 17:04:56 crc kubenswrapper[5081]: I1003 17:04:56.541601 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.396538 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.517090 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle\") pod \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.517432 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5rxp\" (UniqueName: \"kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp\") pod \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.517637 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data\") pod \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\" (UID: \"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2\") " Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.522198 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp" (OuterVolumeSpecName: "kube-api-access-n5rxp") pod "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" (UID: "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2"). InnerVolumeSpecName "kube-api-access-n5rxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.544314 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data" (OuterVolumeSpecName: "config-data") pod "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" (UID: "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.547782 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" (UID: "31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.619537 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.619580 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5rxp\" (UniqueName: \"kubernetes.io/projected/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-kube-api-access-n5rxp\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.619590 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.770666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 17:04:57 crc kubenswrapper[5081]: I1003 17:04:57.828667 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:04:57 crc kubenswrapper[5081]: E1003 17:04:57.828896 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.051506 5081 generic.go:334] "Generic (PLEG): container finished" podID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" exitCode=0 Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.051575 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2","Type":"ContainerDied","Data":"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23"} Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.051608 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2","Type":"ContainerDied","Data":"697881178f0faac238656abe591bdf1347af5e0fd94cd040c624876937068a37"} Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.051630 5081 scope.go:117] "RemoveContainer" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.051771 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.078831 5081 scope.go:117] "RemoveContainer" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" Oct 03 17:04:58 crc kubenswrapper[5081]: E1003 17:04:58.079879 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23\": container with ID starting with 4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23 not found: ID does not exist" containerID="4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.080121 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23"} err="failed to get container status \"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23\": rpc error: code = NotFound desc = could not find container \"4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23\": container with ID starting with 4d14ae8503581af57baabc7c7383371a83303395d3222474267c63224cb22f23 not found: ID does not exist" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.093680 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.102879 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.110675 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:04:58 crc kubenswrapper[5081]: E1003 17:04:58.111181 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerName="nova-cell1-conductor-conductor" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.111195 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerName="nova-cell1-conductor-conductor" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.111409 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" containerName="nova-cell1-conductor-conductor" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.112113 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.115326 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.119267 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.233463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.233583 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.233641 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hb6w\" (UniqueName: \"kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.334770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.334836 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hb6w\" (UniqueName: \"kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.334940 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.339178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.345195 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.357902 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hb6w\" (UniqueName: \"kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w\") pod \"nova-cell1-conductor-0\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.431784 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:58 crc kubenswrapper[5081]: I1003 17:04:58.866508 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:04:58 crc kubenswrapper[5081]: W1003 17:04:58.872703 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8b1902e_cdaf_43a2_b41b_b3b93ebc9156.slice/crio-c58eec509086192daec1bfc8cbb0dd4650032f77afb675d2dec20a99b278ca38 WatchSource:0}: Error finding container c58eec509086192daec1bfc8cbb0dd4650032f77afb675d2dec20a99b278ca38: Status 404 returned error can't find the container with id c58eec509086192daec1bfc8cbb0dd4650032f77afb675d2dec20a99b278ca38 Oct 03 17:04:59 crc kubenswrapper[5081]: I1003 17:04:59.064182 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156","Type":"ContainerStarted","Data":"4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37"} Oct 03 17:04:59 crc kubenswrapper[5081]: I1003 17:04:59.064527 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156","Type":"ContainerStarted","Data":"c58eec509086192daec1bfc8cbb0dd4650032f77afb675d2dec20a99b278ca38"} Oct 03 17:04:59 crc kubenswrapper[5081]: I1003 17:04:59.064614 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 17:04:59 crc kubenswrapper[5081]: I1003 17:04:59.085719 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.085701684 podStartE2EDuration="1.085701684s" podCreationTimestamp="2025-10-03 17:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:04:59.077003214 +0000 UTC m=+5818.042559827" watchObservedRunningTime="2025-10-03 17:04:59.085701684 +0000 UTC m=+5818.051258297" Oct 03 17:04:59 crc kubenswrapper[5081]: I1003 17:04:59.840983 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2" path="/var/lib/kubelet/pods/31cc3fb5-c219-470b-8a49-c1b1ae4c9cd2/volumes" Oct 03 17:05:00 crc kubenswrapper[5081]: I1003 17:05:00.553057 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 17:05:01 crc kubenswrapper[5081]: I1003 17:05:01.532837 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 17:05:01 crc kubenswrapper[5081]: I1003 17:05:01.532898 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 17:05:01 crc kubenswrapper[5081]: I1003 17:05:01.541169 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 17:05:01 crc kubenswrapper[5081]: I1003 17:05:01.541244 5081 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.697755 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.82:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.697773 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.697810 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.697838 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.82:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.771070 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 17:05:02 crc kubenswrapper[5081]: I1003 17:05:02.825858 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 17:05:03 crc kubenswrapper[5081]: I1003 17:05:03.140636 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.213187 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.215611 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.219092 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.225670 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.282915 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.282993 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.283064 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.283103 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88rtw\" (UniqueName: \"kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.283131 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.283150 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384393 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384465 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384494 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384521 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88rtw\" (UniqueName: \"kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.384603 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.385117 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.398178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.398226 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.398715 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.399014 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.407988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88rtw\" (UniqueName: \"kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw\") pod \"cinder-scheduler-0\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 
crc kubenswrapper[5081]: I1003 17:05:05.535303 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:05 crc kubenswrapper[5081]: I1003 17:05:05.983606 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:05 crc kubenswrapper[5081]: W1003 17:05:05.987512 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod671c9d31_71f3_4e55_b100_bdb793be57b5.slice/crio-ea264593cb866b212a894de8e7e8678ad7c0c9e23fa0cb3412c5fb6a6d4f40d1 WatchSource:0}: Error finding container ea264593cb866b212a894de8e7e8678ad7c0c9e23fa0cb3412c5fb6a6d4f40d1: Status 404 returned error can't find the container with id ea264593cb866b212a894de8e7e8678ad7c0c9e23fa0cb3412c5fb6a6d4f40d1 Oct 03 17:05:06 crc kubenswrapper[5081]: I1003 17:05:06.125094 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerStarted","Data":"ea264593cb866b212a894de8e7e8678ad7c0c9e23fa0cb3412c5fb6a6d4f40d1"} Oct 03 17:05:06 crc kubenswrapper[5081]: I1003 17:05:06.857296 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:06 crc kubenswrapper[5081]: I1003 17:05:06.857985 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api-log" containerID="cri-o://2066507e2ae0783798c881ba72cd696bf15a000b07090a27f0c315686bfd2164" gracePeriod=30 Oct 03 17:05:06 crc kubenswrapper[5081]: I1003 17:05:06.858113 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api" containerID="cri-o://079c5c0491bf28bfc03b44051a6094914207bbdff21a9fde74bd6104dee46096" gracePeriod=30 Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.138249 5081 generic.go:334] "Generic (PLEG): container finished" podID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerID="2066507e2ae0783798c881ba72cd696bf15a000b07090a27f0c315686bfd2164" exitCode=143 Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.138343 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerDied","Data":"2066507e2ae0783798c881ba72cd696bf15a000b07090a27f0c315686bfd2164"} Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.141254 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerStarted","Data":"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202"} Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.141294 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerStarted","Data":"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad"} Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.165495 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.165471634 podStartE2EDuration="2.165471634s" podCreationTimestamp="2025-10-03 17:05:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-03 17:05:07.158254937 +0000 UTC m=+5826.123811550" watchObservedRunningTime="2025-10-03 17:05:07.165471634 +0000 UTC m=+5826.131028257" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.564795 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.566946 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.577961 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.578683 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.580445 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.583005 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.595302 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.605167 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634767 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634815 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634837 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634857 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634883 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.634999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-dev\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-lib-modules\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635084 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635104 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635138 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635170 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-run\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635326 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635354 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635370 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635416 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-brick\") pod 
\"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635477 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635509 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635537 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635617 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635677 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-scripts\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635703 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635727 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx5sl\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-kube-api-access-sx5sl\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635775 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635832 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-sys\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 
03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635923 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.635949 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636043 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636107 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636178 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636212 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636299 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-ceph\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.636338 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgp6v\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-kube-api-access-qgp6v\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739211 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739270 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-dev\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739296 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-lib-modules\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739317 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739336 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739360 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739381 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-run\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739428 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739456 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739474 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739503 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: 
I1003 17:05:07.739525 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739549 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739591 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739621 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739645 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-scripts\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739665 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739685 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx5sl\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-kube-api-access-sx5sl\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739706 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739738 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-sys\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739767 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") 
" pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739790 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739845 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739874 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739896 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739938 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-ceph\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739965 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgp6v\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-kube-api-access-qgp6v\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.739995 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.740016 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.740038 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.740061 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.740159 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742626 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-lib-modules\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742737 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742939 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742984 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.743013 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-run\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.743853 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.743886 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.744067 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.744345 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-nvme\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.744395 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.744435 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.742635 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-dev\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.745635 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.745690 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-sys\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.745919 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.745848 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/00a5c66b-f312-4379-9769-64c858f3816a-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.745981 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.746009 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa84b169-b917-4fcf-86a8-cfcde993fd80-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.747099 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data-custom\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.748357 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.750041 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.751321 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-config-data\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.752414 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a5c66b-f312-4379-9769-64c858f3816a-scripts\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.753881 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.754449 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.756605 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.759423 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa84b169-b917-4fcf-86a8-cfcde993fd80-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.759953 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-ceph\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.762462 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx5sl\" (UniqueName: \"kubernetes.io/projected/00a5c66b-f312-4379-9769-64c858f3816a-kube-api-access-sx5sl\") pod \"cinder-backup-0\" (UID: \"00a5c66b-f312-4379-9769-64c858f3816a\") " pod="openstack/cinder-backup-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.771676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgp6v\" (UniqueName: \"kubernetes.io/projected/aa84b169-b917-4fcf-86a8-cfcde993fd80-kube-api-access-qgp6v\") pod \"cinder-volume-volume1-0\" (UID: \"aa84b169-b917-4fcf-86a8-cfcde993fd80\") " pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.896410 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:07 crc kubenswrapper[5081]: I1003 17:05:07.907708 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 03 17:05:08 crc kubenswrapper[5081]: I1003 17:05:08.462469 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 03 17:05:08 crc kubenswrapper[5081]: W1003 17:05:08.462908 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00a5c66b_f312_4379_9769_64c858f3816a.slice/crio-b87aa2a05faedf07fee4e73a94e955d550ac1d27e6848e9d5c8ad594fd13dd4c WatchSource:0}: Error finding container b87aa2a05faedf07fee4e73a94e955d550ac1d27e6848e9d5c8ad594fd13dd4c: Status 404 returned error can't find the container with id b87aa2a05faedf07fee4e73a94e955d550ac1d27e6848e9d5c8ad594fd13dd4c Oct 03 17:05:08 crc kubenswrapper[5081]: I1003 17:05:08.463692 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 03 17:05:08 crc kubenswrapper[5081]: I1003 17:05:08.465434 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:05:08 crc kubenswrapper[5081]: I1003 17:05:08.609778 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 03 17:05:08 crc kubenswrapper[5081]: W1003 17:05:08.610186 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa84b169_b917_4fcf_86a8_cfcde993fd80.slice/crio-f9ae33e7af23e6c87dc80c41f49081718d081e197707d84e52bfee2d3819f8e6 WatchSource:0}: Error finding container f9ae33e7af23e6c87dc80c41f49081718d081e197707d84e52bfee2d3819f8e6: Status 404 returned error can't find the container with id f9ae33e7af23e6c87dc80c41f49081718d081e197707d84e52bfee2d3819f8e6 Oct 03 17:05:09 crc kubenswrapper[5081]: I1003 17:05:09.170511 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" 
event={"ID":"aa84b169-b917-4fcf-86a8-cfcde993fd80","Type":"ContainerStarted","Data":"f9ae33e7af23e6c87dc80c41f49081718d081e197707d84e52bfee2d3819f8e6"} Oct 03 17:05:09 crc kubenswrapper[5081]: I1003 17:05:09.171723 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"00a5c66b-f312-4379-9769-64c858f3816a","Type":"ContainerStarted","Data":"b87aa2a05faedf07fee4e73a94e955d550ac1d27e6848e9d5c8ad594fd13dd4c"} Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.016770 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.80:8776/healthcheck\": read tcp 10.217.0.2:40844->10.217.1.80:8776: read: connection reset by peer" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.196485 5081 generic.go:334] "Generic (PLEG): container finished" podID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerID="079c5c0491bf28bfc03b44051a6094914207bbdff21a9fde74bd6104dee46096" exitCode=0 Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.196703 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerDied","Data":"079c5c0491bf28bfc03b44051a6094914207bbdff21a9fde74bd6104dee46096"} Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.199766 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"aa84b169-b917-4fcf-86a8-cfcde993fd80","Type":"ContainerStarted","Data":"5be0ba879ae98d0dfdc54e66836487e0f9b60b1f457336473c0d2bc036e018e1"} Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.221080 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"00a5c66b-f312-4379-9769-64c858f3816a","Type":"ContainerStarted","Data":"f86f06d363fef7a42dd7292cf0819b3b38670ec65eb3de2ea8ea5b917ee3f627"} Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.221130 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"00a5c66b-f312-4379-9769-64c858f3816a","Type":"ContainerStarted","Data":"830105e4c93b25e0f65098ae07bdfec9704ddab823e8a5937c72362ce98c7586"} Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.250190 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.431921634 podStartE2EDuration="3.250174673s" podCreationTimestamp="2025-10-03 17:05:07 +0000 UTC" firstStartedPulling="2025-10-03 17:05:08.465155953 +0000 UTC m=+5827.430712566" lastFinishedPulling="2025-10-03 17:05:09.283409002 +0000 UTC m=+5828.248965605" observedRunningTime="2025-10-03 17:05:10.245130538 +0000 UTC m=+5829.210687151" watchObservedRunningTime="2025-10-03 17:05:10.250174673 +0000 UTC m=+5829.215731296" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.380976 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513096 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513174 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513268 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513294 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513327 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6585t\" (UniqueName: \"kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513359 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.513373 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom\") pod \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\" (UID: \"8eb8af2d-c868-4fe5-9281-06404bfcc5f0\") " Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.515731 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.516882 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs" (OuterVolumeSpecName: "logs") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.521583 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.522022 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t" (OuterVolumeSpecName: "kube-api-access-6585t") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "kube-api-access-6585t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.535653 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.536776 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts" (OuterVolumeSpecName: "scripts") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.571038 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.588231 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data" (OuterVolumeSpecName: "config-data") pod "8eb8af2d-c868-4fe5-9281-06404bfcc5f0" (UID: "8eb8af2d-c868-4fe5-9281-06404bfcc5f0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615664 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615700 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615709 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615719 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615748 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6585t\" (UniqueName: \"kubernetes.io/projected/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-kube-api-access-6585t\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615760 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:10 crc kubenswrapper[5081]: I1003 17:05:10.615768 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8eb8af2d-c868-4fe5-9281-06404bfcc5f0-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.231643 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8eb8af2d-c868-4fe5-9281-06404bfcc5f0","Type":"ContainerDied","Data":"ccff6b320aab3c7968ce924302edfbbf0d1e1f88862f677948e2563f2912a4e0"} Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.231691 5081 scope.go:117] "RemoveContainer" containerID="079c5c0491bf28bfc03b44051a6094914207bbdff21a9fde74bd6104dee46096" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.231805 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.239334 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"aa84b169-b917-4fcf-86a8-cfcde993fd80","Type":"ContainerStarted","Data":"f84325b2b754265a451bf3ba29bf1e7d3fc6375700f1fbf7cf2df2c71a442336"} Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.300975 5081 scope.go:117] "RemoveContainer" containerID="2066507e2ae0783798c881ba72cd696bf15a000b07090a27f0c315686bfd2164" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.301406 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.222856696 podStartE2EDuration="4.301377422s" podCreationTimestamp="2025-10-03 17:05:07 +0000 UTC" firstStartedPulling="2025-10-03 17:05:08.612157482 +0000 UTC m=+5827.577714095" lastFinishedPulling="2025-10-03 17:05:09.690678208 +0000 UTC m=+5828.656234821" observedRunningTime="2025-10-03 17:05:11.278428512 +0000 UTC m=+5830.243985145" watchObservedRunningTime="2025-10-03 17:05:11.301377422 +0000 UTC m=+5830.266934025" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.318683 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.327150 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.355409 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:11 crc kubenswrapper[5081]: E1003 17:05:11.355861 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.355882 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api" Oct 03 17:05:11 crc kubenswrapper[5081]: E1003 17:05:11.355911 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api-log" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.355918 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api-log" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.356093 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.356122 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" containerName="cinder-api-log" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.357140 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.361990 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.370540 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432100 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-scripts\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432175 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/69b4808f-07a4-457b-97b9-675631790938-etc-machine-id\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432210 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432690 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data-custom\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.432941 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29vn2\" (UniqueName: \"kubernetes.io/projected/69b4808f-07a4-457b-97b9-675631790938-kube-api-access-29vn2\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.433065 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69b4808f-07a4-457b-97b9-675631790938-logs\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.534694 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.534771 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data-custom\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.534836 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29vn2\" (UniqueName: \"kubernetes.io/projected/69b4808f-07a4-457b-97b9-675631790938-kube-api-access-29vn2\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.534885 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69b4808f-07a4-457b-97b9-675631790938-logs\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.534974 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-scripts\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.535871 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/69b4808f-07a4-457b-97b9-675631790938-etc-machine-id\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.535965 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.538766 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/69b4808f-07a4-457b-97b9-675631790938-etc-machine-id\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.539096 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69b4808f-07a4-457b-97b9-675631790938-logs\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.540312 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.542931 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.543355 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data-custom\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.544190 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-scripts\") pod \"cinder-api-0\" (UID: 
\"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.544631 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-config-data\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.547047 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.552851 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.557075 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.563947 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.567127 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29vn2\" (UniqueName: \"kubernetes.io/projected/69b4808f-07a4-457b-97b9-675631790938-kube-api-access-29vn2\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.574876 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.575327 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b4808f-07a4-457b-97b9-675631790938-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"69b4808f-07a4-457b-97b9-675631790938\") " pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.698922 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 17:05:11 crc kubenswrapper[5081]: I1003 17:05:11.843005 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eb8af2d-c868-4fe5-9281-06404bfcc5f0" path="/var/lib/kubelet/pods/8eb8af2d-c868-4fe5-9281-06404bfcc5f0/volumes" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.251232 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.253278 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.253764 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.337906 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.827341 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:05:12 crc kubenswrapper[5081]: E1003 17:05:12.827934 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.897167 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:12 crc kubenswrapper[5081]: I1003 17:05:12.908204 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 03 17:05:13 crc kubenswrapper[5081]: I1003 17:05:13.264804 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"69b4808f-07a4-457b-97b9-675631790938","Type":"ContainerStarted","Data":"70e7468459a36c380c065d2d2a86811ded38f5082f0dfdc5bb3aaa4aafcb4eef"} Oct 03 17:05:13 crc kubenswrapper[5081]: I1003 17:05:13.266140 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"69b4808f-07a4-457b-97b9-675631790938","Type":"ContainerStarted","Data":"ec121545f40a98e8bc458171d9377f2e58f30af5410a8f9807cbd1803a706b85"} Oct 03 17:05:14 crc kubenswrapper[5081]: I1003 17:05:14.278240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"69b4808f-07a4-457b-97b9-675631790938","Type":"ContainerStarted","Data":"e3bf293cce33b38dc2120ea9ed82c1c03da5661e18700bf59d94a7fffd0db70a"} Oct 03 17:05:14 crc kubenswrapper[5081]: I1003 17:05:14.278718 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 17:05:14 crc kubenswrapper[5081]: I1003 17:05:14.297543 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.297523582 podStartE2EDuration="3.297523582s" podCreationTimestamp="2025-10-03 17:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:05:14.294879356 +0000 UTC m=+5833.260435999" watchObservedRunningTime="2025-10-03 17:05:14.297523582 +0000 
UTC m=+5833.263080205" Oct 03 17:05:15 crc kubenswrapper[5081]: I1003 17:05:15.864690 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 17:05:15 crc kubenswrapper[5081]: I1003 17:05:15.914407 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:16 crc kubenswrapper[5081]: I1003 17:05:16.296466 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="cinder-scheduler" containerID="cri-o://3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad" gracePeriod=30 Oct 03 17:05:16 crc kubenswrapper[5081]: I1003 17:05:16.296851 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="probe" containerID="cri-o://8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202" gracePeriod=30 Oct 03 17:05:17 crc kubenswrapper[5081]: I1003 17:05:17.306803 5081 generic.go:334] "Generic (PLEG): container finished" podID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerID="8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202" exitCode=0 Oct 03 17:05:17 crc kubenswrapper[5081]: I1003 17:05:17.306879 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerDied","Data":"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202"} Oct 03 17:05:17 crc kubenswrapper[5081]: I1003 17:05:17.864549 5081 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7c45a8eb-08c5-4a88-ae2f-835953e14a86"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7c45a8eb-08c5-4a88-ae2f-835953e14a86] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7c45a8eb_08c5_4a88_ae2f_835953e14a86.slice" Oct 03 17:05:17 crc kubenswrapper[5081]: E1003 17:05:17.864632 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod7c45a8eb-08c5-4a88-ae2f-835953e14a86] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod7c45a8eb-08c5-4a88-ae2f-835953e14a86] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7c45a8eb_08c5_4a88_ae2f_835953e14a86.slice" pod="openstack/nova-cell1-novncproxy-0" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.104341 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.134879 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.315515 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.337096 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.346772 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.368068 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.370107 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.374146 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.384817 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.493534 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckfqh\" (UniqueName: \"kubernetes.io/projected/ae810f05-5faa-4f0b-922e-9c2128d25d5b-kube-api-access-ckfqh\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.493768 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.493812 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.596473 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.596539 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.596656 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckfqh\" (UniqueName: \"kubernetes.io/projected/ae810f05-5faa-4f0b-922e-9c2128d25d5b-kube-api-access-ckfqh\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.605910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.607624 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae810f05-5faa-4f0b-922e-9c2128d25d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.624435 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckfqh\" (UniqueName: \"kubernetes.io/projected/ae810f05-5faa-4f0b-922e-9c2128d25d5b-kube-api-access-ckfqh\") pod \"nova-cell1-novncproxy-0\" (UID: \"ae810f05-5faa-4f0b-922e-9c2128d25d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.703336 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:18 crc kubenswrapper[5081]: I1003 17:05:18.965452 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104621 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104749 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88rtw\" (UniqueName: \"kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104838 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104865 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104883 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.104942 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom\") pod \"671c9d31-71f3-4e55-b100-bdb793be57b5\" (UID: \"671c9d31-71f3-4e55-b100-bdb793be57b5\") " Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.105352 5081 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.105784 5081 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/671c9d31-71f3-4e55-b100-bdb793be57b5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.108419 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.108579 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts" (OuterVolumeSpecName: "scripts") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.110025 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw" (OuterVolumeSpecName: "kube-api-access-88rtw") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "kube-api-access-88rtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.158359 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.202192 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data" (OuterVolumeSpecName: "config-data") pod "671c9d31-71f3-4e55-b100-bdb793be57b5" (UID: "671c9d31-71f3-4e55-b100-bdb793be57b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.208918 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.208964 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.209052 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.209065 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/671c9d31-71f3-4e55-b100-bdb793be57b5-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.209077 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88rtw\" (UniqueName: \"kubernetes.io/projected/671c9d31-71f3-4e55-b100-bdb793be57b5-kube-api-access-88rtw\") on node \"crc\" DevicePath \"\"" Oct 03 17:05:19 crc kubenswrapper[5081]: W1003 17:05:19.215694 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae810f05_5faa_4f0b_922e_9c2128d25d5b.slice/crio-df96439bba36b8f84ce9894e9d73f9df2f0f600ab19c1dd5b5b43e5d0bd697d0 WatchSource:0}: Error finding container df96439bba36b8f84ce9894e9d73f9df2f0f600ab19c1dd5b5b43e5d0bd697d0: Status 404 returned error can't find the container with id df96439bba36b8f84ce9894e9d73f9df2f0f600ab19c1dd5b5b43e5d0bd697d0 Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.219015 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.327550 5081 generic.go:334] "Generic (PLEG): container finished" podID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerID="3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad" exitCode=0 Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.327647 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerDied","Data":"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad"} Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.327667 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.327696 5081 scope.go:117] "RemoveContainer" containerID="8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.327680 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"671c9d31-71f3-4e55-b100-bdb793be57b5","Type":"ContainerDied","Data":"ea264593cb866b212a894de8e7e8678ad7c0c9e23fa0cb3412c5fb6a6d4f40d1"} Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.328950 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ae810f05-5faa-4f0b-922e-9c2128d25d5b","Type":"ContainerStarted","Data":"df96439bba36b8f84ce9894e9d73f9df2f0f600ab19c1dd5b5b43e5d0bd697d0"} Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.415119 5081 scope.go:117] "RemoveContainer" containerID="3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.415434 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.425719 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.452336 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:19 crc kubenswrapper[5081]: E1003 17:05:19.452767 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="cinder-scheduler" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.452784 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="cinder-scheduler" Oct 03 17:05:19 crc kubenswrapper[5081]: E1003 17:05:19.452810 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="probe" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.452816 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="probe" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.452995 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="cinder-scheduler" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.453009 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" containerName="probe" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.454187 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.462320 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.476012 5081 scope.go:117] "RemoveContainer" containerID="8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.481053 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:19 crc kubenswrapper[5081]: E1003 17:05:19.508738 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202\": container with ID starting with 8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202 not found: ID does not exist" containerID="8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.508800 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202"} err="failed to get container status \"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202\": rpc error: code = NotFound desc = could not find container \"8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202\": container with ID starting with 8c453a3cd6905c0bfe1777fe4b8b93a4c12a8c698fb6711688446fb663fbc202 not found: ID does not exist" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.508831 5081 scope.go:117] "RemoveContainer" containerID="3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad" Oct 03 17:05:19 crc kubenswrapper[5081]: E1003 17:05:19.510066 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad\": container with ID starting with 3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad not found: ID does not exist" containerID="3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.510108 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad"} err="failed to get container status \"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad\": rpc error: code = NotFound desc = could not find container \"3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad\": container with ID starting with 3eb3a1adc6301d91f3c218b7c310cbe40ce15e85e8629e8807ed62d0fad2a0ad not found: ID does not exist" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518738 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518820 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518846 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsbtk\" (UniqueName: \"kubernetes.io/projected/dda4cdc8-07f1-4153-8531-0a827ccf3029-kube-api-access-gsbtk\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518906 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518945 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dda4cdc8-07f1-4153-8531-0a827ccf3029-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.518969 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620066 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-scripts\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620120 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsbtk\" (UniqueName: \"kubernetes.io/projected/dda4cdc8-07f1-4153-8531-0a827ccf3029-kube-api-access-gsbtk\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620185 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620230 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dda4cdc8-07f1-4153-8531-0a827ccf3029-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620253 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620284 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.620789 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dda4cdc8-07f1-4153-8531-0a827ccf3029-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.625541 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-scripts\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.625663 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.627050 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.638118 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsbtk\" (UniqueName: \"kubernetes.io/projected/dda4cdc8-07f1-4153-8531-0a827ccf3029-kube-api-access-gsbtk\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.641492 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda4cdc8-07f1-4153-8531-0a827ccf3029-config-data\") pod \"cinder-scheduler-0\" (UID: \"dda4cdc8-07f1-4153-8531-0a827ccf3029\") " pod="openstack/cinder-scheduler-0" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.837787 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="671c9d31-71f3-4e55-b100-bdb793be57b5" path="/var/lib/kubelet/pods/671c9d31-71f3-4e55-b100-bdb793be57b5/volumes" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.838372 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c45a8eb-08c5-4a88-ae2f-835953e14a86" path="/var/lib/kubelet/pods/7c45a8eb-08c5-4a88-ae2f-835953e14a86/volumes" Oct 03 17:05:19 crc kubenswrapper[5081]: I1003 17:05:19.878789 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 17:05:20 crc kubenswrapper[5081]: I1003 17:05:20.035195 5081 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod0d01708d-75cb-411e-95ac-d3458148063e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod0d01708d-75cb-411e-95ac-d3458148063e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod0d01708d_75cb_411e_95ac_d3458148063e.slice" Oct 03 17:05:20 crc kubenswrapper[5081]: W1003 17:05:20.316588 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddda4cdc8_07f1_4153_8531_0a827ccf3029.slice/crio-da349359a15cbbc21ef9097b3d6b99418b516aee3d3e16c9bfa90c96778acb2d WatchSource:0}: Error finding container da349359a15cbbc21ef9097b3d6b99418b516aee3d3e16c9bfa90c96778acb2d: Status 404 returned error can't find the container with id da349359a15cbbc21ef9097b3d6b99418b516aee3d3e16c9bfa90c96778acb2d Oct 03 17:05:20 crc kubenswrapper[5081]: I1003 17:05:20.318214 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 17:05:20 crc kubenswrapper[5081]: I1003 17:05:20.346639 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ae810f05-5faa-4f0b-922e-9c2128d25d5b","Type":"ContainerStarted","Data":"fa13b2113ced6045926274abc0a0a055637468225603c1377e43a805e9dfb8d0"} Oct 03 17:05:20 crc kubenswrapper[5081]: I1003 17:05:20.348635 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dda4cdc8-07f1-4153-8531-0a827ccf3029","Type":"ContainerStarted","Data":"da349359a15cbbc21ef9097b3d6b99418b516aee3d3e16c9bfa90c96778acb2d"} Oct 03 17:05:20 crc kubenswrapper[5081]: I1003 17:05:20.368702 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.368687723 podStartE2EDuration="2.368687723s" podCreationTimestamp="2025-10-03 17:05:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:05:20.366823649 +0000 UTC m=+5839.332380262" watchObservedRunningTime="2025-10-03 17:05:20.368687723 +0000 UTC m=+5839.334244336" Oct 03 17:05:21 crc kubenswrapper[5081]: I1003 17:05:21.362315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dda4cdc8-07f1-4153-8531-0a827ccf3029","Type":"ContainerStarted","Data":"89c4d3db24c640b5b131a7f412f6723f0f93af5445cb1f48d49c5fd8901f61df"} Oct 03 17:05:22 crc kubenswrapper[5081]: I1003 17:05:22.382055 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dda4cdc8-07f1-4153-8531-0a827ccf3029","Type":"ContainerStarted","Data":"f0214e889a7b6f69b3057069b3fd79f55165415833841d1c0b9ae89390fc0265"} Oct 03 17:05:22 crc kubenswrapper[5081]: I1003 17:05:22.418386 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.418364755 podStartE2EDuration="3.418364755s" podCreationTimestamp="2025-10-03 17:05:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:05:22.410230841 +0000 UTC m=+5841.375787504" watchObservedRunningTime="2025-10-03 17:05:22.418364755 +0000 UTC m=+5841.383921368" Oct 03 
17:05:23 crc kubenswrapper[5081]: I1003 17:05:23.700656 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 03 17:05:23 crc kubenswrapper[5081]: I1003 17:05:23.704614 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:24 crc kubenswrapper[5081]: I1003 17:05:24.878986 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 17:05:25 crc kubenswrapper[5081]: I1003 17:05:25.827500 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:05:25 crc kubenswrapper[5081]: E1003 17:05:25.827876 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:05:28 crc kubenswrapper[5081]: I1003 17:05:28.704620 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:28 crc kubenswrapper[5081]: I1003 17:05:28.716197 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:29 crc kubenswrapper[5081]: I1003 17:05:29.464881 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 17:05:30 crc kubenswrapper[5081]: I1003 17:05:30.165895 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 17:05:36 crc kubenswrapper[5081]: I1003 17:05:36.827639 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:05:36 crc kubenswrapper[5081]: E1003 17:05:36.828367 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:05:47 crc kubenswrapper[5081]: I1003 17:05:47.828377 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:05:47 crc kubenswrapper[5081]: E1003 17:05:47.829436 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:05:57 crc kubenswrapper[5081]: I1003 17:05:57.061420 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-kr6pl"] Oct 03 17:05:57 crc kubenswrapper[5081]: I1003 17:05:57.073503 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-kr6pl"] Oct 03 
17:05:57 crc kubenswrapper[5081]: I1003 17:05:57.841192 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e99a29-237d-4ef4-9632-b9e2d990e0f4" path="/var/lib/kubelet/pods/a7e99a29-237d-4ef4-9632-b9e2d990e0f4/volumes" Oct 03 17:05:58 crc kubenswrapper[5081]: I1003 17:05:58.828686 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:05:58 crc kubenswrapper[5081]: E1003 17:05:58.829278 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:06:07 crc kubenswrapper[5081]: I1003 17:06:07.037207 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9b2f-account-create-vqvmp"] Oct 03 17:06:07 crc kubenswrapper[5081]: I1003 17:06:07.044509 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9b2f-account-create-vqvmp"] Oct 03 17:06:07 crc kubenswrapper[5081]: I1003 17:06:07.838364 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47783f9d-99a3-407e-ba0d-485a66c35a73" path="/var/lib/kubelet/pods/47783f9d-99a3-407e-ba0d-485a66c35a73/volumes" Oct 03 17:06:11 crc kubenswrapper[5081]: I1003 17:06:11.835437 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:06:11 crc kubenswrapper[5081]: E1003 17:06:11.837079 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:06:15 crc kubenswrapper[5081]: I1003 17:06:15.036923 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-6jswn"] Oct 03 17:06:15 crc kubenswrapper[5081]: I1003 17:06:15.046607 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-6jswn"] Oct 03 17:06:15 crc kubenswrapper[5081]: I1003 17:06:15.841126 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144" path="/var/lib/kubelet/pods/5e4ecbe6-c4ff-4931-b59e-b0e4aee9d144/volumes" Oct 03 17:06:23 crc kubenswrapper[5081]: I1003 17:06:23.397696 5081 scope.go:117] "RemoveContainer" containerID="20bd3746003b8df5c92ec1f57bd7cc70c364306450c882c64283bf684bb2d5c0" Oct 03 17:06:23 crc kubenswrapper[5081]: I1003 17:06:23.443989 5081 scope.go:117] "RemoveContainer" containerID="9d52cd93fd242289b177af03db3fd115f62ce9da033a500c3ec7abc510f135ed" Oct 03 17:06:23 crc kubenswrapper[5081]: I1003 17:06:23.477460 5081 scope.go:117] "RemoveContainer" containerID="8e187ba35b9795666e456d316c84c1a0775e2258f9a358526be61c903c232ac7" Oct 03 17:06:23 crc kubenswrapper[5081]: I1003 17:06:23.509411 5081 scope.go:117] "RemoveContainer" containerID="5f2ff17e5d38c501d6ffc163fbae0338020c6ccc012deb306f3901dbcae315e4" Oct 03 17:06:23 crc kubenswrapper[5081]: I1003 17:06:23.551389 5081 scope.go:117] 
"RemoveContainer" containerID="626dfd33d9ca0721cd8a0fb45936ffd3f7f6cc6ff915e7cac93665e9e73deb5c" Oct 03 17:06:25 crc kubenswrapper[5081]: I1003 17:06:25.828594 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:06:25 crc kubenswrapper[5081]: E1003 17:06:25.829161 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:06:28 crc kubenswrapper[5081]: I1003 17:06:28.040051 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-scl5w"] Oct 03 17:06:28 crc kubenswrapper[5081]: I1003 17:06:28.048924 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-scl5w"] Oct 03 17:06:29 crc kubenswrapper[5081]: I1003 17:06:29.839506 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccc072d6-fcd6-4318-a9c8-b2cc91f7a524" path="/var/lib/kubelet/pods/ccc072d6-fcd6-4318-a9c8-b2cc91f7a524/volumes" Oct 03 17:06:39 crc kubenswrapper[5081]: I1003 17:06:39.828086 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:06:39 crc kubenswrapper[5081]: E1003 17:06:39.828930 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.540765 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.545447 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.555852 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.684604 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r57g\" (UniqueName: \"kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.684705 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.684770 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.787086 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r57g\" (UniqueName: \"kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.787164 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.787226 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.787660 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.787762 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.812853 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4r57g\" (UniqueName: \"kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g\") pod \"community-operators-47k2g\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:49 crc kubenswrapper[5081]: I1003 17:06:49.864011 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:50 crc kubenswrapper[5081]: I1003 17:06:50.387609 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:06:50 crc kubenswrapper[5081]: W1003 17:06:50.398247 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda88f38ff_7774_462b_ae20_4076ec4131ac.slice/crio-34fd20fcff6075823f149e66785e407b07bd0783c0fa7e9a59d186fe7868e595 WatchSource:0}: Error finding container 34fd20fcff6075823f149e66785e407b07bd0783c0fa7e9a59d186fe7868e595: Status 404 returned error can't find the container with id 34fd20fcff6075823f149e66785e407b07bd0783c0fa7e9a59d186fe7868e595 Oct 03 17:06:51 crc kubenswrapper[5081]: I1003 17:06:51.211923 5081 generic.go:334] "Generic (PLEG): container finished" podID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerID="794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac" exitCode=0 Oct 03 17:06:51 crc kubenswrapper[5081]: I1003 17:06:51.212180 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerDied","Data":"794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac"} Oct 03 17:06:51 crc kubenswrapper[5081]: I1003 17:06:51.212203 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerStarted","Data":"34fd20fcff6075823f149e66785e407b07bd0783c0fa7e9a59d186fe7868e595"} Oct 03 17:06:52 crc kubenswrapper[5081]: I1003 17:06:52.223469 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerStarted","Data":"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979"} Oct 03 17:06:52 crc kubenswrapper[5081]: I1003 17:06:52.829165 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:06:52 crc kubenswrapper[5081]: E1003 17:06:52.829430 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:06:53 crc kubenswrapper[5081]: I1003 17:06:53.234217 5081 generic.go:334] "Generic (PLEG): container finished" podID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerID="2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979" exitCode=0 Oct 03 17:06:53 crc kubenswrapper[5081]: I1003 17:06:53.234262 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" 
event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerDied","Data":"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979"} Oct 03 17:06:54 crc kubenswrapper[5081]: I1003 17:06:54.245092 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerStarted","Data":"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42"} Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.698630 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-47k2g" podStartSLOduration=7.129969526 podStartE2EDuration="9.698610799s" podCreationTimestamp="2025-10-03 17:06:49 +0000 UTC" firstStartedPulling="2025-10-03 17:06:51.214483893 +0000 UTC m=+5930.180040506" lastFinishedPulling="2025-10-03 17:06:53.783125176 +0000 UTC m=+5932.748681779" observedRunningTime="2025-10-03 17:06:54.268351904 +0000 UTC m=+5933.233908527" watchObservedRunningTime="2025-10-03 17:06:58.698610799 +0000 UTC m=+5937.664167412" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.707949 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.709918 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.717467 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.816678 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.816745 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxhcr\" (UniqueName: \"kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.816820 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.918853 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.918901 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxhcr\" (UniqueName: \"kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr\") pod 
\"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.918934 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.919367 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.919659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:58 crc kubenswrapper[5081]: I1003 17:06:58.943434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxhcr\" (UniqueName: \"kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr\") pod \"certified-operators-8r9ll\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:59 crc kubenswrapper[5081]: I1003 17:06:59.041360 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:06:59 crc kubenswrapper[5081]: I1003 17:06:59.554409 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:06:59 crc kubenswrapper[5081]: I1003 17:06:59.865938 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:59 crc kubenswrapper[5081]: I1003 17:06:59.866258 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:06:59 crc kubenswrapper[5081]: I1003 17:06:59.912226 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:07:00 crc kubenswrapper[5081]: I1003 17:07:00.295350 5081 generic.go:334] "Generic (PLEG): container finished" podID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerID="fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962" exitCode=0 Oct 03 17:07:00 crc kubenswrapper[5081]: I1003 17:07:00.295438 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerDied","Data":"fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962"} Oct 03 17:07:00 crc kubenswrapper[5081]: I1003 17:07:00.295467 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerStarted","Data":"d7d7a3bb4feae8204817742697d5dcdaebf7ee3438d94bb7513100bb09659278"} Oct 03 17:07:00 crc kubenswrapper[5081]: I1003 17:07:00.347506 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:07:01 crc kubenswrapper[5081]: I1003 17:07:01.311478 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerStarted","Data":"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082"} Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.276464 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.322926 5081 generic.go:334] "Generic (PLEG): container finished" podID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerID="ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082" exitCode=0 Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.323154 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-47k2g" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="registry-server" containerID="cri-o://c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42" gracePeriod=2 Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.324148 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerDied","Data":"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082"} Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.758099 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.900416 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r57g\" (UniqueName: \"kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g\") pod \"a88f38ff-7774-462b-ae20-4076ec4131ac\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.900546 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities\") pod \"a88f38ff-7774-462b-ae20-4076ec4131ac\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.900697 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content\") pod \"a88f38ff-7774-462b-ae20-4076ec4131ac\" (UID: \"a88f38ff-7774-462b-ae20-4076ec4131ac\") " Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.901864 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities" (OuterVolumeSpecName: "utilities") pod "a88f38ff-7774-462b-ae20-4076ec4131ac" (UID: "a88f38ff-7774-462b-ae20-4076ec4131ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.907548 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g" (OuterVolumeSpecName: "kube-api-access-4r57g") pod "a88f38ff-7774-462b-ae20-4076ec4131ac" (UID: "a88f38ff-7774-462b-ae20-4076ec4131ac"). InnerVolumeSpecName "kube-api-access-4r57g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:02 crc kubenswrapper[5081]: I1003 17:07:02.959421 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a88f38ff-7774-462b-ae20-4076ec4131ac" (UID: "a88f38ff-7774-462b-ae20-4076ec4131ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.003111 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.003150 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r57g\" (UniqueName: \"kubernetes.io/projected/a88f38ff-7774-462b-ae20-4076ec4131ac-kube-api-access-4r57g\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.003162 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88f38ff-7774-462b-ae20-4076ec4131ac-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.333913 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerStarted","Data":"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c"} Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.336934 5081 generic.go:334] "Generic (PLEG): container finished" podID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerID="c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42" exitCode=0 Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.336981 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerDied","Data":"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42"} Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.337008 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47k2g" event={"ID":"a88f38ff-7774-462b-ae20-4076ec4131ac","Type":"ContainerDied","Data":"34fd20fcff6075823f149e66785e407b07bd0783c0fa7e9a59d186fe7868e595"} Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.337028 5081 scope.go:117] "RemoveContainer" containerID="c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.337154 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-47k2g" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.350529 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8r9ll" podStartSLOduration=2.92511442 podStartE2EDuration="5.350511281s" podCreationTimestamp="2025-10-03 17:06:58 +0000 UTC" firstStartedPulling="2025-10-03 17:07:00.297572967 +0000 UTC m=+5939.263129580" lastFinishedPulling="2025-10-03 17:07:02.722969828 +0000 UTC m=+5941.688526441" observedRunningTime="2025-10-03 17:07:03.349981586 +0000 UTC m=+5942.315538199" watchObservedRunningTime="2025-10-03 17:07:03.350511281 +0000 UTC m=+5942.316067894" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.372502 5081 scope.go:117] "RemoveContainer" containerID="2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.378973 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.387384 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-47k2g"] Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.405922 5081 scope.go:117] "RemoveContainer" containerID="794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.439498 5081 scope.go:117] "RemoveContainer" containerID="c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42" Oct 03 17:07:03 crc kubenswrapper[5081]: E1003 17:07:03.440363 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42\": container with ID starting with c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42 not found: ID does not exist" containerID="c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.440408 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42"} err="failed to get container status \"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42\": rpc error: code = NotFound desc = could not find container \"c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42\": container with ID starting with c55db663e9c6228f4b2793441407d5fc5230cf006ccd850b4ac6f47e02b94c42 not found: ID does not exist" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.440440 5081 scope.go:117] "RemoveContainer" containerID="2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979" Oct 03 17:07:03 crc kubenswrapper[5081]: E1003 17:07:03.441147 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979\": container with ID starting with 2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979 not found: ID does not exist" containerID="2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.441181 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979"} err="failed to get 
container status \"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979\": rpc error: code = NotFound desc = could not find container \"2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979\": container with ID starting with 2f3477bd2bc85193d57729be9f67862cf7f79e6615cce390268f14cf483a7979 not found: ID does not exist" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.441201 5081 scope.go:117] "RemoveContainer" containerID="794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac" Oct 03 17:07:03 crc kubenswrapper[5081]: E1003 17:07:03.441657 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac\": container with ID starting with 794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac not found: ID does not exist" containerID="794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.441698 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac"} err="failed to get container status \"794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac\": rpc error: code = NotFound desc = could not find container \"794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac\": container with ID starting with 794b2bf8ce6ff96422afcea5224503ff801f2fc4f1da112c16908ffdca58f4ac not found: ID does not exist" Oct 03 17:07:03 crc kubenswrapper[5081]: I1003 17:07:03.840918 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" path="/var/lib/kubelet/pods/a88f38ff-7774-462b-ae20-4076ec4131ac/volumes" Oct 03 17:07:04 crc kubenswrapper[5081]: I1003 17:07:04.827638 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:07:04 crc kubenswrapper[5081]: E1003 17:07:04.827914 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:07:09 crc kubenswrapper[5081]: I1003 17:07:09.042281 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:09 crc kubenswrapper[5081]: I1003 17:07:09.042900 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:09 crc kubenswrapper[5081]: I1003 17:07:09.097054 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:09 crc kubenswrapper[5081]: I1003 17:07:09.442218 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:09 crc kubenswrapper[5081]: I1003 17:07:09.495245 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.047085 5081 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-controller-fjdv6"] Oct 03 17:07:10 crc kubenswrapper[5081]: E1003 17:07:10.047527 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="registry-server" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.047542 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="registry-server" Oct 03 17:07:10 crc kubenswrapper[5081]: E1003 17:07:10.047569 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="extract-utilities" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.047575 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="extract-utilities" Oct 03 17:07:10 crc kubenswrapper[5081]: E1003 17:07:10.047584 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="extract-content" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.047612 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="extract-content" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.047786 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88f38ff-7774-462b-ae20-4076ec4131ac" containerName="registry-server" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.048527 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fjdv6" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.050540 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vqm9f" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.051008 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.057695 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ccgkm"] Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.060250 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.067473 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ccgkm"] Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.076089 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fjdv6"] Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.183749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-log\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184125 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184300 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-log-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184383 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184474 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv6mm\" (UniqueName: \"kubernetes.io/projected/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-kube-api-access-cv6mm\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184631 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-lib\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184715 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-run\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184789 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-scripts\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184908 
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.184988 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed1f167d-3b34-4746-bf62-f4bea485b117-scripts\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.185098 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz44x\" (UniqueName: \"kubernetes.io/projected/ed1f167d-3b34-4746-bf62-f4bea485b117-kube-api-access-wz44x\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.285873 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz44x\" (UniqueName: \"kubernetes.io/projected/ed1f167d-3b34-4746-bf62-f4bea485b117-kube-api-access-wz44x\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.285944 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-log\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.285972 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.285987 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-log-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286020 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286040 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv6mm\" (UniqueName: \"kubernetes.io/projected/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-kube-api-access-cv6mm\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286093 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-lib\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286115 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-run\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286134 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-scripts\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286192 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-etc-ovs\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286213 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed1f167d-3b34-4746-bf62-f4bea485b117-scripts\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286342 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-log\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286895 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.286963 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-log-ovn\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.287018 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed1f167d-3b34-4746-bf62-f4bea485b117-var-run\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.287070 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-run\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.287130 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-var-lib\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.288376 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed1f167d-3b34-4746-bf62-f4bea485b117-scripts\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.288446 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-etc-ovs\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.289249 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-scripts\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.307369 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz44x\" (UniqueName: \"kubernetes.io/projected/ed1f167d-3b34-4746-bf62-f4bea485b117-kube-api-access-wz44x\") pod \"ovn-controller-fjdv6\" (UID: \"ed1f167d-3b34-4746-bf62-f4bea485b117\") " pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.308257 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv6mm\" (UniqueName: \"kubernetes.io/projected/923d9dbc-ce8e-48b1-8425-4d9075edfa5c-kube-api-access-cv6mm\") pod \"ovn-controller-ovs-ccgkm\" (UID: \"923d9dbc-ce8e-48b1-8425-4d9075edfa5c\") " pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.378090 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.398486 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ccgkm"
Need to start a new one" pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:10 crc kubenswrapper[5081]: I1003 17:07:10.921855 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fjdv6"] Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.242860 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ccgkm"] Oct 03 17:07:11 crc kubenswrapper[5081]: W1003 17:07:11.248401 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod923d9dbc_ce8e_48b1_8425_4d9075edfa5c.slice/crio-8a7668d13c4b54af8ee3db1440d474e99bcbfd4ea0755de10dedbbb185acd345 WatchSource:0}: Error finding container 8a7668d13c4b54af8ee3db1440d474e99bcbfd4ea0755de10dedbbb185acd345: Status 404 returned error can't find the container with id 8a7668d13c4b54af8ee3db1440d474e99bcbfd4ea0755de10dedbbb185acd345 Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.426029 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ccgkm" event={"ID":"923d9dbc-ce8e-48b1-8425-4d9075edfa5c","Type":"ContainerStarted","Data":"8a7668d13c4b54af8ee3db1440d474e99bcbfd4ea0755de10dedbbb185acd345"} Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.430352 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6" event={"ID":"ed1f167d-3b34-4746-bf62-f4bea485b117","Type":"ContainerStarted","Data":"db12fe0482830ac15c173c5ff9ce7131bcab5e7b390cfd80a4556067a02a448f"} Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.430561 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8r9ll" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="registry-server" containerID="cri-o://f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c" gracePeriod=2 Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.480836 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-5xtx9"] Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.482342 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.487971 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.498016 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5xtx9"] Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.537382 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-config\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.537496 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovn-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.537560 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkmrv\" (UniqueName: \"kubernetes.io/projected/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-kube-api-access-dkmrv\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.537897 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovs-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.640033 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovs-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.640370 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovs-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.640476 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-config\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.640547 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovn-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc 
kubenswrapper[5081]: I1003 17:07:11.640625 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkmrv\" (UniqueName: \"kubernetes.io/projected/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-kube-api-access-dkmrv\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.641032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-ovn-rundir\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.641737 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-config\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.663351 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkmrv\" (UniqueName: \"kubernetes.io/projected/5e9fd2cd-5158-4c3d-ac50-60e94ba543b4-kube-api-access-dkmrv\") pod \"ovn-controller-metrics-5xtx9\" (UID: \"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4\") " pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.913432 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5xtx9" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.915544 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.947657 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content\") pod \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.947760 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxhcr\" (UniqueName: \"kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr\") pod \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.947793 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities\") pod \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\" (UID: \"0d534c0a-ff04-4b98-af01-8a40cd0390a1\") " Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.948946 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities" (OuterVolumeSpecName: "utilities") pod "0d534c0a-ff04-4b98-af01-8a40cd0390a1" (UID: "0d534c0a-ff04-4b98-af01-8a40cd0390a1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:07:11 crc kubenswrapper[5081]: I1003 17:07:11.953259 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr" (OuterVolumeSpecName: "kube-api-access-hxhcr") pod "0d534c0a-ff04-4b98-af01-8a40cd0390a1" (UID: "0d534c0a-ff04-4b98-af01-8a40cd0390a1"). InnerVolumeSpecName "kube-api-access-hxhcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.007869 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d534c0a-ff04-4b98-af01-8a40cd0390a1" (UID: "0d534c0a-ff04-4b98-af01-8a40cd0390a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.050567 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.052076 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxhcr\" (UniqueName: \"kubernetes.io/projected/0d534c0a-ff04-4b98-af01-8a40cd0390a1-kube-api-access-hxhcr\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.052095 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d534c0a-ff04-4b98-af01-8a40cd0390a1-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.440091 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5xtx9"] Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.450453 5081 generic.go:334] "Generic (PLEG): container finished" podID="923d9dbc-ce8e-48b1-8425-4d9075edfa5c" containerID="c99ef491fc1035323b7fe357c61303960b5af3fc7937d21a1d22c3dc40feb29d" exitCode=0 Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.450507 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ccgkm" event={"ID":"923d9dbc-ce8e-48b1-8425-4d9075edfa5c","Type":"ContainerDied","Data":"c99ef491fc1035323b7fe357c61303960b5af3fc7937d21a1d22c3dc40feb29d"} Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.454105 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6" event={"ID":"ed1f167d-3b34-4746-bf62-f4bea485b117","Type":"ContainerStarted","Data":"cf6c0450a0b27ad24c075c2cf4ec36c045bf7c9d2f58262ef51b77f4667e8116"} Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.454238 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-fjdv6" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.458402 5081 generic.go:334] "Generic (PLEG): container finished" podID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerID="f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c" exitCode=0 Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.458434 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8r9ll" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.458445 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerDied","Data":"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c"} Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.458476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8r9ll" event={"ID":"0d534c0a-ff04-4b98-af01-8a40cd0390a1","Type":"ContainerDied","Data":"d7d7a3bb4feae8204817742697d5dcdaebf7ee3438d94bb7513100bb09659278"} Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.458497 5081 scope.go:117] "RemoveContainer" containerID="f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.507423 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-fjdv6" podStartSLOduration=2.507399577 podStartE2EDuration="2.507399577s" podCreationTimestamp="2025-10-03 17:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:07:12.494830266 +0000 UTC m=+5951.460386889" watchObservedRunningTime="2025-10-03 17:07:12.507399577 +0000 UTC m=+5951.472956190" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.528443 5081 scope.go:117] "RemoveContainer" containerID="ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.610202 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.617898 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8r9ll"] Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.619323 5081 scope.go:117] "RemoveContainer" containerID="fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.656317 5081 scope.go:117] "RemoveContainer" containerID="f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c" Oct 03 17:07:12 crc kubenswrapper[5081]: E1003 17:07:12.657625 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c\": container with ID starting with f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c not found: ID does not exist" containerID="f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.657691 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c"} err="failed to get container status \"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c\": rpc error: code = NotFound desc = could not find container \"f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c\": container with ID starting with f84edf5a244fe1764d4ff9fe6bf7cc02a2bfeb3d9693d277865777e7e7dd5b5c not found: ID does not exist" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.657725 5081 scope.go:117] "RemoveContainer" 
containerID="ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082" Oct 03 17:07:12 crc kubenswrapper[5081]: E1003 17:07:12.658844 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082\": container with ID starting with ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082 not found: ID does not exist" containerID="ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.658889 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082"} err="failed to get container status \"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082\": rpc error: code = NotFound desc = could not find container \"ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082\": container with ID starting with ad5a4ab9c03f6ad62f0fceccfb1aa224d6192cde1dcd10849ad49f8713839082 not found: ID does not exist" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.658914 5081 scope.go:117] "RemoveContainer" containerID="fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962" Oct 03 17:07:12 crc kubenswrapper[5081]: E1003 17:07:12.660346 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962\": container with ID starting with fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962 not found: ID does not exist" containerID="fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962" Oct 03 17:07:12 crc kubenswrapper[5081]: I1003 17:07:12.660393 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962"} err="failed to get container status \"fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962\": rpc error: code = NotFound desc = could not find container \"fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962\": container with ID starting with fbd7ea8ad65843b7501fab303362e29fa12c299e86ba501d507dfbd7a2389962 not found: ID does not exist" Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.331574 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-mql6s"] Oct 03 17:07:13 crc kubenswrapper[5081]: E1003 17:07:13.332345 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="extract-utilities" Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.332366 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="extract-utilities" Oct 03 17:07:13 crc kubenswrapper[5081]: E1003 17:07:13.332377 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="registry-server" Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.332384 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="registry-server" Oct 03 17:07:13 crc kubenswrapper[5081]: E1003 17:07:13.332402 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="extract-content" Oct 03 17:07:13 crc 
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.332408 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="extract-content"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.332620 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" containerName="registry-server"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.333297 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.348624 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-mql6s"]
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.390982 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfpfc\" (UniqueName: \"kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc\") pod \"octavia-db-create-mql6s\" (UID: \"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7\") " pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.471096 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ccgkm" event={"ID":"923d9dbc-ce8e-48b1-8425-4d9075edfa5c","Type":"ContainerStarted","Data":"d0ad9493782f34f339fc33b3c68ecf3982b9f43c33f77f15a40affe2c6f7f89c"}
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.471145 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ccgkm" event={"ID":"923d9dbc-ce8e-48b1-8425-4d9075edfa5c","Type":"ContainerStarted","Data":"784b6d7be2dd591a71d1453becb4c0fd55b703b88a56ee6a180a652bdde7ff10"}
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.471309 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.474659 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5xtx9" event={"ID":"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4","Type":"ContainerStarted","Data":"da0309d449f814fa49cd2270be80b7cf4fa59d4c5201a7dded2a77420f7b736c"}
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.474829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5xtx9" event={"ID":"5e9fd2cd-5158-4c3d-ac50-60e94ba543b4","Type":"ContainerStarted","Data":"b1f12d15c121eef059cedf9ae2cc400f6544ca2e31cb4f2f185cbfe4810fbb44"}
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.492349 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfpfc\" (UniqueName: \"kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc\") pod \"octavia-db-create-mql6s\" (UID: \"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7\") " pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.504444 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ccgkm" podStartSLOduration=3.504418519 podStartE2EDuration="3.504418519s" podCreationTimestamp="2025-10-03 17:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:07:13.490415416 +0000 UTC m=+5952.455972039" watchObservedRunningTime="2025-10-03 17:07:13.504418519 +0000 UTC m=+5952.469975152"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.528046 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfpfc\" (UniqueName: \"kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc\") pod \"octavia-db-create-mql6s\" (UID: \"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7\") " pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.529506 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-5xtx9" podStartSLOduration=2.52949016 podStartE2EDuration="2.52949016s" podCreationTimestamp="2025-10-03 17:07:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:07:13.518937346 +0000 UTC m=+5952.484493969" watchObservedRunningTime="2025-10-03 17:07:13.52949016 +0000 UTC m=+5952.495046773"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.664604 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:13 crc kubenswrapper[5081]: I1003 17:07:13.846177 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d534c0a-ff04-4b98-af01-8a40cd0390a1" path="/var/lib/kubelet/pods/0d534c0a-ff04-4b98-af01-8a40cd0390a1/volumes"
Oct 03 17:07:14 crc kubenswrapper[5081]: I1003 17:07:14.148569 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-mql6s"]
Oct 03 17:07:14 crc kubenswrapper[5081]: I1003 17:07:14.487406 5081 generic.go:334] "Generic (PLEG): container finished" podID="5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" containerID="685d642eccea0bd6502038733fb97ef85cefea018794e17b25aa89e6f1f9ebfe" exitCode=0
Oct 03 17:07:14 crc kubenswrapper[5081]: I1003 17:07:14.487480 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-mql6s" event={"ID":"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7","Type":"ContainerDied","Data":"685d642eccea0bd6502038733fb97ef85cefea018794e17b25aa89e6f1f9ebfe"}
Oct 03 17:07:14 crc kubenswrapper[5081]: I1003 17:07:14.487790 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-mql6s" event={"ID":"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7","Type":"ContainerStarted","Data":"562ab24fef67d8a82059d7ace2d70dc0a6cca688928747bb69ab2d0aa9ef21ff"}
Oct 03 17:07:14 crc kubenswrapper[5081]: I1003 17:07:14.488006 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ccgkm"
Oct 03 17:07:15 crc kubenswrapper[5081]: I1003 17:07:15.856457 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-mql6s"
Oct 03 17:07:15 crc kubenswrapper[5081]: I1003 17:07:15.944761 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfpfc\" (UniqueName: \"kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc\") pod \"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7\" (UID: \"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7\") "
Oct 03 17:07:15 crc kubenswrapper[5081]: I1003 17:07:15.949503 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc" (OuterVolumeSpecName: "kube-api-access-rfpfc") pod "5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" (UID: "5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7"). InnerVolumeSpecName "kube-api-access-rfpfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:16 crc kubenswrapper[5081]: I1003 17:07:16.047461 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfpfc\" (UniqueName: \"kubernetes.io/projected/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7-kube-api-access-rfpfc\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:16 crc kubenswrapper[5081]: I1003 17:07:16.507928 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-mql6s" event={"ID":"5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7","Type":"ContainerDied","Data":"562ab24fef67d8a82059d7ace2d70dc0a6cca688928747bb69ab2d0aa9ef21ff"} Oct 03 17:07:16 crc kubenswrapper[5081]: I1003 17:07:16.507971 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-mql6s" Oct 03 17:07:16 crc kubenswrapper[5081]: I1003 17:07:16.508002 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="562ab24fef67d8a82059d7ace2d70dc0a6cca688928747bb69ab2d0aa9ef21ff" Oct 03 17:07:17 crc kubenswrapper[5081]: I1003 17:07:17.842692 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:07:17 crc kubenswrapper[5081]: E1003 17:07:17.844252 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:07:23 crc kubenswrapper[5081]: I1003 17:07:23.654833 5081 scope.go:117] "RemoveContainer" containerID="f795ab1c30927ce3856116bc948e6e0eee7c94711e1069738be81eb4c8adadc0" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.268104 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-3907-account-create-7fvgs"] Oct 03 17:07:26 crc kubenswrapper[5081]: E1003 17:07:26.269221 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" containerName="mariadb-database-create" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.269241 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" containerName="mariadb-database-create" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.269480 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" containerName="mariadb-database-create" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.270512 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.272954 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.284663 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3907-account-create-7fvgs"] Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.337992 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4jrz\" (UniqueName: \"kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz\") pod \"octavia-3907-account-create-7fvgs\" (UID: \"2a742149-dad8-4a69-a1ba-ec38349e2bf7\") " pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.440017 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4jrz\" (UniqueName: \"kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz\") pod \"octavia-3907-account-create-7fvgs\" (UID: \"2a742149-dad8-4a69-a1ba-ec38349e2bf7\") " pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.465201 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4jrz\" (UniqueName: \"kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz\") pod \"octavia-3907-account-create-7fvgs\" (UID: \"2a742149-dad8-4a69-a1ba-ec38349e2bf7\") " pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:26 crc kubenswrapper[5081]: I1003 17:07:26.597695 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:27 crc kubenswrapper[5081]: I1003 17:07:27.036499 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3907-account-create-7fvgs"] Oct 03 17:07:27 crc kubenswrapper[5081]: W1003 17:07:27.038236 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a742149_dad8_4a69_a1ba_ec38349e2bf7.slice/crio-9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7 WatchSource:0}: Error finding container 9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7: Status 404 returned error can't find the container with id 9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7 Oct 03 17:07:27 crc kubenswrapper[5081]: I1003 17:07:27.612951 5081 generic.go:334] "Generic (PLEG): container finished" podID="2a742149-dad8-4a69-a1ba-ec38349e2bf7" containerID="e34204778cb158e84b7ff6d9ff701561ff4d565956ef51329080d9d79c771e3e" exitCode=0 Oct 03 17:07:27 crc kubenswrapper[5081]: I1003 17:07:27.612997 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3907-account-create-7fvgs" event={"ID":"2a742149-dad8-4a69-a1ba-ec38349e2bf7","Type":"ContainerDied","Data":"e34204778cb158e84b7ff6d9ff701561ff4d565956ef51329080d9d79c771e3e"} Oct 03 17:07:27 crc kubenswrapper[5081]: I1003 17:07:27.613317 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3907-account-create-7fvgs" event={"ID":"2a742149-dad8-4a69-a1ba-ec38349e2bf7","Type":"ContainerStarted","Data":"9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7"} Oct 03 17:07:28 crc kubenswrapper[5081]: I1003 17:07:28.828285 5081 scope.go:117] "RemoveContainer" 
containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:07:28 crc kubenswrapper[5081]: E1003 17:07:28.828914 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:07:28 crc kubenswrapper[5081]: I1003 17:07:28.940807 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.088204 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4jrz\" (UniqueName: \"kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz\") pod \"2a742149-dad8-4a69-a1ba-ec38349e2bf7\" (UID: \"2a742149-dad8-4a69-a1ba-ec38349e2bf7\") " Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.093264 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz" (OuterVolumeSpecName: "kube-api-access-t4jrz") pod "2a742149-dad8-4a69-a1ba-ec38349e2bf7" (UID: "2a742149-dad8-4a69-a1ba-ec38349e2bf7"). InnerVolumeSpecName "kube-api-access-t4jrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.190665 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4jrz\" (UniqueName: \"kubernetes.io/projected/2a742149-dad8-4a69-a1ba-ec38349e2bf7-kube-api-access-t4jrz\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.632429 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3907-account-create-7fvgs" event={"ID":"2a742149-dad8-4a69-a1ba-ec38349e2bf7","Type":"ContainerDied","Data":"9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7"} Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.632750 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9af99234b4ea9418a90696afd0c5353d38c8789be2c73b1f53a43c55075727c7" Oct 03 17:07:29 crc kubenswrapper[5081]: I1003 17:07:29.632507 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3907-account-create-7fvgs" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.312795 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-mpj7m"] Oct 03 17:07:32 crc kubenswrapper[5081]: E1003 17:07:32.313468 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a742149-dad8-4a69-a1ba-ec38349e2bf7" containerName="mariadb-account-create" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.313481 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a742149-dad8-4a69-a1ba-ec38349e2bf7" containerName="mariadb-account-create" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.313697 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a742149-dad8-4a69-a1ba-ec38349e2bf7" containerName="mariadb-account-create" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.314500 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.333506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-mpj7m"] Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.367733 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x4fv\" (UniqueName: \"kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv\") pod \"octavia-persistence-db-create-mpj7m\" (UID: \"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233\") " pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.470167 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x4fv\" (UniqueName: \"kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv\") pod \"octavia-persistence-db-create-mpj7m\" (UID: \"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233\") " pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.489983 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x4fv\" (UniqueName: \"kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv\") pod \"octavia-persistence-db-create-mpj7m\" (UID: \"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233\") " pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:32 crc kubenswrapper[5081]: I1003 17:07:32.647065 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:33 crc kubenswrapper[5081]: I1003 17:07:33.094445 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-mpj7m"] Oct 03 17:07:33 crc kubenswrapper[5081]: W1003 17:07:33.099011 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e0e8a17_18fb_41f0_b2b5_8a5bccb86233.slice/crio-8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3 WatchSource:0}: Error finding container 8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3: Status 404 returned error can't find the container with id 8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3 Oct 03 17:07:33 crc kubenswrapper[5081]: I1003 17:07:33.666832 5081 generic.go:334] "Generic (PLEG): container finished" podID="4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" containerID="c72350e52513b82f61af0606751a0e0dfe1a9543042b94facb5cd2041f796654" exitCode=0 Oct 03 17:07:33 crc kubenswrapper[5081]: I1003 17:07:33.666868 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-mpj7m" event={"ID":"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233","Type":"ContainerDied","Data":"c72350e52513b82f61af0606751a0e0dfe1a9543042b94facb5cd2041f796654"} Oct 03 17:07:33 crc kubenswrapper[5081]: I1003 17:07:33.667115 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-mpj7m" event={"ID":"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233","Type":"ContainerStarted","Data":"8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3"} Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.019357 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.119905 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x4fv\" (UniqueName: \"kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv\") pod \"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233\" (UID: \"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233\") " Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.125285 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv" (OuterVolumeSpecName: "kube-api-access-8x4fv") pod "4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" (UID: "4e0e8a17-18fb-41f0-b2b5-8a5bccb86233"). InnerVolumeSpecName "kube-api-access-8x4fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.222124 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x4fv\" (UniqueName: \"kubernetes.io/projected/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233-kube-api-access-8x4fv\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.693782 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-mpj7m" event={"ID":"4e0e8a17-18fb-41f0-b2b5-8a5bccb86233","Type":"ContainerDied","Data":"8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3"} Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.693830 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b52888a6e508309609ffbb08a7b1426ee7abf100ceb7f255461bc49af532bc3" Oct 03 17:07:35 crc kubenswrapper[5081]: I1003 17:07:35.694099 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-mpj7m" Oct 03 17:07:40 crc kubenswrapper[5081]: I1003 17:07:40.827919 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:07:40 crc kubenswrapper[5081]: E1003 17:07:40.828714 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.208789 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-198a-account-create-4rxzm"] Oct 03 17:07:43 crc kubenswrapper[5081]: E1003 17:07:43.209534 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" containerName="mariadb-database-create" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.209547 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" containerName="mariadb-database-create" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.209754 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" containerName="mariadb-database-create" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.210536 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.212418 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.219675 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-198a-account-create-4rxzm"] Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.256286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbrn\" (UniqueName: \"kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn\") pod \"octavia-198a-account-create-4rxzm\" (UID: \"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c\") " pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.358484 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcbrn\" (UniqueName: \"kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn\") pod \"octavia-198a-account-create-4rxzm\" (UID: \"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c\") " pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.380938 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbrn\" (UniqueName: \"kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn\") pod \"octavia-198a-account-create-4rxzm\" (UID: \"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c\") " pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.529889 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:43 crc kubenswrapper[5081]: I1003 17:07:43.978729 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-198a-account-create-4rxzm"] Oct 03 17:07:44 crc kubenswrapper[5081]: I1003 17:07:44.771048 5081 generic.go:334] "Generic (PLEG): container finished" podID="7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" containerID="4b3034e38b1adf6d3ff1696a40dba4cbbda629e6536ed4553040cbe82215cfe7" exitCode=0 Oct 03 17:07:44 crc kubenswrapper[5081]: I1003 17:07:44.771214 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-198a-account-create-4rxzm" event={"ID":"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c","Type":"ContainerDied","Data":"4b3034e38b1adf6d3ff1696a40dba4cbbda629e6536ed4553040cbe82215cfe7"} Oct 03 17:07:44 crc kubenswrapper[5081]: I1003 17:07:44.771957 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-198a-account-create-4rxzm" event={"ID":"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c","Type":"ContainerStarted","Data":"93b9b9262693e7bc54debac581111a95b426064ed59b42dc805396dcc23fb798"} Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.442547 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-fjdv6" podUID="ed1f167d-3b34-4746-bf62-f4bea485b117" containerName="ovn-controller" probeResult="failure" output=< Oct 03 17:07:45 crc kubenswrapper[5081]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 17:07:45 crc kubenswrapper[5081]: > Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.446484 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.457784 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ccgkm" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.556024 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fjdv6-config-k85th"] Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.563333 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.567212 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.572465 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fjdv6-config-k85th"] Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.722515 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.722618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.722862 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.722966 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.723036 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.723109 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-997j9\" (UniqueName: \"kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825631 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825701 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn\") pod 
\"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825757 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825802 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825827 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.825859 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-997j9\" (UniqueName: \"kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.826368 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.826532 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.826619 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.827024 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.829787 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts\") pod 
\"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.853754 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-997j9\" (UniqueName: \"kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9\") pod \"ovn-controller-fjdv6-config-k85th\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:45 crc kubenswrapper[5081]: I1003 17:07:45.884372 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.193873 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.359289 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcbrn\" (UniqueName: \"kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn\") pod \"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c\" (UID: \"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c\") " Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.365079 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn" (OuterVolumeSpecName: "kube-api-access-jcbrn") pod "7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" (UID: "7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c"). InnerVolumeSpecName "kube-api-access-jcbrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.457187 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fjdv6-config-k85th"] Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.463067 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcbrn\" (UniqueName: \"kubernetes.io/projected/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c-kube-api-access-jcbrn\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:46 crc kubenswrapper[5081]: W1003 17:07:46.468708 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dacd8f7_fc2a_4e6a_a308_245d1a17abae.slice/crio-460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e WatchSource:0}: Error finding container 460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e: Status 404 returned error can't find the container with id 460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.791551 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-198a-account-create-4rxzm" event={"ID":"7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c","Type":"ContainerDied","Data":"93b9b9262693e7bc54debac581111a95b426064ed59b42dc805396dcc23fb798"} Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.792132 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93b9b9262693e7bc54debac581111a95b426064ed59b42dc805396dcc23fb798" Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.791623 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-198a-account-create-4rxzm" Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.792918 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6-config-k85th" event={"ID":"9dacd8f7-fc2a-4e6a-a308-245d1a17abae","Type":"ContainerStarted","Data":"8b39383cc60a98336c0e21512c1eb662697ed32d290d84bbc73f8745c2b75599"} Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.792964 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6-config-k85th" event={"ID":"9dacd8f7-fc2a-4e6a-a308-245d1a17abae","Type":"ContainerStarted","Data":"460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e"} Oct 03 17:07:46 crc kubenswrapper[5081]: I1003 17:07:46.812329 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-fjdv6-config-k85th" podStartSLOduration=1.812307692 podStartE2EDuration="1.812307692s" podCreationTimestamp="2025-10-03 17:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:07:46.810033466 +0000 UTC m=+5985.775590079" watchObservedRunningTime="2025-10-03 17:07:46.812307692 +0000 UTC m=+5985.777864305" Oct 03 17:07:47 crc kubenswrapper[5081]: I1003 17:07:47.803318 5081 generic.go:334] "Generic (PLEG): container finished" podID="9dacd8f7-fc2a-4e6a-a308-245d1a17abae" containerID="8b39383cc60a98336c0e21512c1eb662697ed32d290d84bbc73f8745c2b75599" exitCode=0 Oct 03 17:07:47 crc kubenswrapper[5081]: I1003 17:07:47.803420 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6-config-k85th" event={"ID":"9dacd8f7-fc2a-4e6a-a308-245d1a17abae","Type":"ContainerDied","Data":"8b39383cc60a98336c0e21512c1eb662697ed32d290d84bbc73f8745c2b75599"} Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.139029 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fjdv6-config-k85th" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.317873 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-997j9\" (UniqueName: \"kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.317944 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.318009 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.318135 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.318195 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.318248 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run\") pod \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\" (UID: \"9dacd8f7-fc2a-4e6a-a308-245d1a17abae\") " Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.318870 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run" (OuterVolumeSpecName: "var-run") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.320380 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.320441 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.320469 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.321177 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts" (OuterVolumeSpecName: "scripts") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.336470 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9" (OuterVolumeSpecName: "kube-api-access-997j9") pod "9dacd8f7-fc2a-4e6a-a308-245d1a17abae" (UID: "9dacd8f7-fc2a-4e6a-a308-245d1a17abae"). InnerVolumeSpecName "kube-api-access-997j9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.385531 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-7d488df5bb-2hhk4"] Oct 03 17:07:49 crc kubenswrapper[5081]: E1003 17:07:49.386219 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dacd8f7-fc2a-4e6a-a308-245d1a17abae" containerName="ovn-config" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.386299 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dacd8f7-fc2a-4e6a-a308-245d1a17abae" containerName="ovn-config" Oct 03 17:07:49 crc kubenswrapper[5081]: E1003 17:07:49.386385 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" containerName="mariadb-account-create" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.386451 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" containerName="mariadb-account-create" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.386710 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" containerName="mariadb-account-create" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.389483 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dacd8f7-fc2a-4e6a-a308-245d1a17abae" containerName="ovn-config" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.390947 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.395119 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.395157 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.403688 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-vvnhg" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.408000 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-7d488df5bb-2hhk4"] Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420233 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420268 5081 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420281 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-997j9\" (UniqueName: \"kubernetes.io/projected/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-kube-api-access-997j9\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420290 5081 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420298 5081 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.420305 5081 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dacd8f7-fc2a-4e6a-a308-245d1a17abae-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.522208 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-octavia-run\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.522269 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-scripts\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.522318 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-config-data\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:07:49 crc 
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.522478 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-combined-ca-bundle\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.522657 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-config-data-merged\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.624770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-config-data-merged\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.624922 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-octavia-run\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.624955 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-scripts\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.624985 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-config-data\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.625031 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-combined-ca-bundle\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.625477 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-octavia-run\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.625758 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/819412d5-054a-44e9-993a-0b4a33fe300b-config-data-merged\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.628449 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-scripts\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.633363 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-combined-ca-bundle\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.633651 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/819412d5-054a-44e9-993a-0b4a33fe300b-config-data\") pod \"octavia-api-7d488df5bb-2hhk4\" (UID: \"819412d5-054a-44e9-993a-0b4a33fe300b\") " pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.709031 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.835585 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fjdv6-config-k85th"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.858236 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fjdv6-config-k85th" event={"ID":"9dacd8f7-fc2a-4e6a-a308-245d1a17abae","Type":"ContainerDied","Data":"460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e"}
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.858275 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="460d166e16b969cf29ed89e34972a693c33ca83d16f456e6d4f628cfe203a06e"
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.926610 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fjdv6-config-k85th"]
Oct 03 17:07:49 crc kubenswrapper[5081]: I1003 17:07:49.936613 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fjdv6-config-k85th"]
Oct 03 17:07:50 crc kubenswrapper[5081]: I1003 17:07:50.189065 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-7d488df5bb-2hhk4"]
Oct 03 17:07:50 crc kubenswrapper[5081]: W1003 17:07:50.208080 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod819412d5_054a_44e9_993a_0b4a33fe300b.slice/crio-56e50ddd7eddc8918fa8bbadbed31092e40a507306d5be2aabb9f0902ab8aace WatchSource:0}: Error finding container 56e50ddd7eddc8918fa8bbadbed31092e40a507306d5be2aabb9f0902ab8aace: Status 404 returned error can't find the container with id 56e50ddd7eddc8918fa8bbadbed31092e40a507306d5be2aabb9f0902ab8aace
Oct 03 17:07:50 crc kubenswrapper[5081]: I1003 17:07:50.448199 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-fjdv6"
Oct 03 17:07:50 crc kubenswrapper[5081]: I1003 17:07:50.844973 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7d488df5bb-2hhk4" event={"ID":"819412d5-054a-44e9-993a-0b4a33fe300b","Type":"ContainerStarted","Data":"56e50ddd7eddc8918fa8bbadbed31092e40a507306d5be2aabb9f0902ab8aace"}
Oct 03 17:07:51 crc kubenswrapper[5081]: I1003 17:07:51.844697 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dacd8f7-fc2a-4e6a-a308-245d1a17abae" path="/var/lib/kubelet/pods/9dacd8f7-fc2a-4e6a-a308-245d1a17abae/volumes"
Oct 03 17:07:53 crc kubenswrapper[5081]: I1003 17:07:53.827726 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"
Oct 03 17:07:53 crc kubenswrapper[5081]: E1003 17:07:53.828406 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:07:58 crc kubenswrapper[5081]: I1003 17:07:58.919181 5081 generic.go:334] "Generic (PLEG): container finished" podID="819412d5-054a-44e9-993a-0b4a33fe300b" containerID="40090d456e372b91055be4904cad75f5bad092f3eb24e1a795c31bb59254057a" exitCode=0
Oct 03 17:07:58 crc kubenswrapper[5081]: I1003 17:07:58.919272 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7d488df5bb-2hhk4" event={"ID":"819412d5-054a-44e9-993a-0b4a33fe300b","Type":"ContainerDied","Data":"40090d456e372b91055be4904cad75f5bad092f3eb24e1a795c31bb59254057a"}
Oct 03 17:07:59 crc kubenswrapper[5081]: I1003 17:07:59.934593 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7d488df5bb-2hhk4" event={"ID":"819412d5-054a-44e9-993a-0b4a33fe300b","Type":"ContainerStarted","Data":"b5b5f52f60c488c8a7b7d8bb5716641f890e0255454582af8f101eea08295ced"}
Oct 03 17:07:59 crc kubenswrapper[5081]: I1003 17:07:59.935452 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:59 crc kubenswrapper[5081]: I1003 17:07:59.935474 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-7d488df5bb-2hhk4"
Oct 03 17:07:59 crc kubenswrapper[5081]: I1003 17:07:59.935484 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7d488df5bb-2hhk4" event={"ID":"819412d5-054a-44e9-993a-0b4a33fe300b","Type":"ContainerStarted","Data":"1dac08a9ad52a50f2fe2aaf53ee6693c8c0150b5613e418b12ea4cd685bb708b"}
Oct 03 17:07:59 crc kubenswrapper[5081]: I1003 17:07:59.965779 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-7d488df5bb-2hhk4" podStartSLOduration=2.818509195 podStartE2EDuration="10.965744567s" podCreationTimestamp="2025-10-03 17:07:49 +0000 UTC" firstStartedPulling="2025-10-03 17:07:50.211239499 +0000 UTC m=+5989.176796112" lastFinishedPulling="2025-10-03 17:07:58.358474871 +0000 UTC m=+5997.324031484" observedRunningTime="2025-10-03 17:07:59.956599164 +0000 UTC m=+5998.922155787" watchObservedRunningTime="2025-10-03 17:07:59.965744567 +0000 UTC m=+5998.931301180"
Oct 03 17:08:04 crc kubenswrapper[5081]: I1003 17:08:04.827366 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3"
event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b"} Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.881117 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-c9kjr"] Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.882953 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.887717 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.887758 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.887808 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.891397 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-c9kjr"] Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.996533 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.996723 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-scripts\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.996786 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6b6c0048-f1a7-4325-90c6-885a102c1696-hm-ports\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:07 crc kubenswrapper[5081]: I1003 17:08:07.996816 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data-merged\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.099443 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.099630 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-scripts\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.099681 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6b6c0048-f1a7-4325-90c6-885a102c1696-hm-ports\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.099709 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data-merged\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.100232 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data-merged\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.100544 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6b6c0048-f1a7-4325-90c6-885a102c1696-hm-ports\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.104920 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-scripts\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.112961 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6c0048-f1a7-4325-90c6-885a102c1696-config-data\") pod \"octavia-rsyslog-c9kjr\" (UID: \"6b6c0048-f1a7-4325-90c6-885a102c1696\") " pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.200695 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.746488 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-c9kjr"] Oct 03 17:08:08 crc kubenswrapper[5081]: W1003 17:08:08.755034 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b6c0048_f1a7_4325_90c6_885a102c1696.slice/crio-d4afab13001dcb05a71cf667de9e2429d966ed76938db9cccb81060af0359a4c WatchSource:0}: Error finding container d4afab13001dcb05a71cf667de9e2429d966ed76938db9cccb81060af0359a4c: Status 404 returned error can't find the container with id d4afab13001dcb05a71cf667de9e2429d966ed76938db9cccb81060af0359a4c Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.827283 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.831656 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.835932 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Oct 03 17:08:08 crc kubenswrapper[5081]: I1003 17:08:08.854216 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.000185 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.024019 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.024206 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.053644 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c9kjr" event={"ID":"6b6c0048-f1a7-4325-90c6-885a102c1696","Type":"ContainerStarted","Data":"d4afab13001dcb05a71cf667de9e2429d966ed76938db9cccb81060af0359a4c"} Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.125864 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.126028 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.126858 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.132619 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config\") pod \"octavia-image-upload-678599687f-4tpbx\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.180324 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.327689 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-7d488df5bb-2hhk4" Oct 03 17:08:09 crc kubenswrapper[5081]: I1003 17:08:09.668298 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:09 crc kubenswrapper[5081]: W1003 17:08:09.678427 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fe8bc5a_8530_4559_9853_b1283f40d51e.slice/crio-a5002688e80e6b2d39b9e21f5820f4bf0faf3a635a40b00d39f8086268e57f49 WatchSource:0}: Error finding container a5002688e80e6b2d39b9e21f5820f4bf0faf3a635a40b00d39f8086268e57f49: Status 404 returned error can't find the container with id a5002688e80e6b2d39b9e21f5820f4bf0faf3a635a40b00d39f8086268e57f49 Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.068806 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerStarted","Data":"a5002688e80e6b2d39b9e21f5820f4bf0faf3a635a40b00d39f8086268e57f49"} Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.238179 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-tzr4h"] Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.239994 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.244529 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.257403 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-tzr4h"] Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.355051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.355333 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.355460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-combined-ca-bundle\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.355489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:10 crc kubenswrapper[5081]: 
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.461343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.462707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.462933 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.463999 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.468946 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-combined-ca-bundle\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.470295 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.480550 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data\") pod \"octavia-db-sync-tzr4h\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") " pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:10 crc kubenswrapper[5081]: I1003 17:08:10.572112 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:11 crc kubenswrapper[5081]: I1003 17:08:11.089634 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c9kjr" event={"ID":"6b6c0048-f1a7-4325-90c6-885a102c1696","Type":"ContainerStarted","Data":"cca5d6acfe0ebe6f8191f69148b151cc5e820de20543da8a03d5a4f8cf7cf073"}
Oct 03 17:08:11 crc kubenswrapper[5081]: I1003 17:08:11.117400 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-tzr4h"]
Oct 03 17:08:11 crc kubenswrapper[5081]: W1003 17:08:11.264932 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a96f5ea_1a87_469f_9b6d_0c2501e89bcf.slice/crio-04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad WatchSource:0}: Error finding container 04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad: Status 404 returned error can't find the container with id 04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad
Oct 03 17:08:12 crc kubenswrapper[5081]: I1003 17:08:12.110766 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-tzr4h" event={"ID":"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf","Type":"ContainerStarted","Data":"04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad"}
Oct 03 17:08:13 crc kubenswrapper[5081]: I1003 17:08:13.122288 5081 generic.go:334] "Generic (PLEG): container finished" podID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerID="aed667435271dab12f1496ec369cd0cb153f08434c483ae900f4f34c92ba0330" exitCode=0
Oct 03 17:08:13 crc kubenswrapper[5081]: I1003 17:08:13.123227 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-tzr4h" event={"ID":"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf","Type":"ContainerDied","Data":"aed667435271dab12f1496ec369cd0cb153f08434c483ae900f4f34c92ba0330"}
Oct 03 17:08:13 crc kubenswrapper[5081]: I1003 17:08:13.129717 5081 generic.go:334] "Generic (PLEG): container finished" podID="6b6c0048-f1a7-4325-90c6-885a102c1696" containerID="cca5d6acfe0ebe6f8191f69148b151cc5e820de20543da8a03d5a4f8cf7cf073" exitCode=0
Oct 03 17:08:13 crc kubenswrapper[5081]: I1003 17:08:13.129777 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c9kjr" event={"ID":"6b6c0048-f1a7-4325-90c6-885a102c1696","Type":"ContainerDied","Data":"cca5d6acfe0ebe6f8191f69148b151cc5e820de20543da8a03d5a4f8cf7cf073"}
Oct 03 17:08:14 crc kubenswrapper[5081]: I1003 17:08:14.140940 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-tzr4h" event={"ID":"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf","Type":"ContainerStarted","Data":"87bf62b8a7f2de6fb195a94c1444e7c73335244576c9f418c15bb8983b9f6224"}
Oct 03 17:08:14 crc kubenswrapper[5081]: I1003 17:08:14.159369 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-tzr4h" podStartSLOduration=4.159347435 podStartE2EDuration="4.159347435s" podCreationTimestamp="2025-10-03 17:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:08:14.156918485 +0000 UTC m=+6013.122475118" watchObservedRunningTime="2025-10-03 17:08:14.159347435 +0000 UTC m=+6013.124904048"
Oct 03 17:08:16 crc kubenswrapper[5081]: I1003 17:08:16.158451 5081 generic.go:334] "Generic (PLEG): container finished" podID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerID="87bf62b8a7f2de6fb195a94c1444e7c73335244576c9f418c15bb8983b9f6224" exitCode=0
Oct 03 17:08:16 crc kubenswrapper[5081]: I1003 17:08:16.158525 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-tzr4h" event={"ID":"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf","Type":"ContainerDied","Data":"87bf62b8a7f2de6fb195a94c1444e7c73335244576c9f418c15bb8983b9f6224"}
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.489150 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-xxbmw"]
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.491704 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.496193 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.496368 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.496491 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.504844 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-xxbmw"]
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.540973 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-scripts\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.541031 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data-merged\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.541117 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.541169 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/faef3e0c-bc19-4475-9c3b-fb8aea539120-hm-ports\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.541197 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-combined-ca-bundle\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.541217 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-amphora-certs\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643047 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643117 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/faef3e0c-bc19-4475-9c3b-fb8aea539120-hm-ports\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643141 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-combined-ca-bundle\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643161 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-amphora-certs\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643250 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-scripts\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.643276 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data-merged\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.644703 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data-merged\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.646815 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/faef3e0c-bc19-4475-9c3b-fb8aea539120-hm-ports\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.650999 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-combined-ca-bundle\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.651215 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-config-data\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.652033 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-amphora-certs\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.652757 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/faef3e0c-bc19-4475-9c3b-fb8aea539120-scripts\") pod \"octavia-worker-xxbmw\" (UID: \"faef3e0c-bc19-4475-9c3b-fb8aea539120\") " pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:17 crc kubenswrapper[5081]: I1003 17:08:17.819713 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-xxbmw"
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.577359 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-tzr4h"
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.762964 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data\") pod \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") "
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.763054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-combined-ca-bundle\") pod \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") "
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.763127 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged\") pod \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") "
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.763228 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts\") pod \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\" (UID: \"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf\") "
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.770424 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts" (OuterVolumeSpecName: "scripts") pod "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" (UID: "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.783084 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data" (OuterVolumeSpecName: "config-data") pod "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" (UID: "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.800176 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" (UID: "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.809616 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" (UID: "2a96f5ea-1a87-469f-9b6d-0c2501e89bcf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.865088 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.865122 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.865162 5081 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-config-data-merged\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:18 crc kubenswrapper[5081]: I1003 17:08:18.865176 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.189614 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerStarted","Data":"6f5fb92857d6af54e199b5fa5db598226231b603692a66dea02714aab4483a09"} Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.192329 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-tzr4h" Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.192405 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-tzr4h" event={"ID":"2a96f5ea-1a87-469f-9b6d-0c2501e89bcf","Type":"ContainerDied","Data":"04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad"} Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.192444 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04dfbdbe477f50a55d06b958eed3597652d15c3ea9278e255b2a778471a839ad" Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.196841 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c9kjr" event={"ID":"6b6c0048-f1a7-4325-90c6-885a102c1696","Type":"ContainerStarted","Data":"9f939ffe7f8f1d92629532b1e1801dd71fbec49ca1ab8866dad04a10f47f9570"} Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.197471 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.232911 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-c9kjr" podStartSLOduration=2.310614291 podStartE2EDuration="12.232892387s" podCreationTimestamp="2025-10-03 17:08:07 +0000 UTC" firstStartedPulling="2025-10-03 17:08:08.768111335 +0000 UTC m=+6007.733667948" lastFinishedPulling="2025-10-03 17:08:18.690389431 +0000 UTC m=+6017.655946044" observedRunningTime="2025-10-03 17:08:19.225038261 +0000 UTC m=+6018.190594894" watchObservedRunningTime="2025-10-03 17:08:19.232892387 +0000 UTC m=+6018.198448990" Oct 03 17:08:19 crc kubenswrapper[5081]: I1003 17:08:19.470734 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-xxbmw"] Oct 03 17:08:20 crc kubenswrapper[5081]: I1003 17:08:20.207998 5081 generic.go:334] "Generic (PLEG): container finished" podID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerID="6f5fb92857d6af54e199b5fa5db598226231b603692a66dea02714aab4483a09" exitCode=0 Oct 03 17:08:20 crc kubenswrapper[5081]: I1003 17:08:20.208108 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerDied","Data":"6f5fb92857d6af54e199b5fa5db598226231b603692a66dea02714aab4483a09"} Oct 03 17:08:20 crc kubenswrapper[5081]: I1003 17:08:20.209992 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-xxbmw" event={"ID":"faef3e0c-bc19-4475-9c3b-fb8aea539120","Type":"ContainerStarted","Data":"4d5d1a47fa101dbeae3b1713e64e072a431823ad39c01995b172076cb5a4ddd6"} Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.171153 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-5r6rb"] Oct 03 17:08:21 crc kubenswrapper[5081]: E1003 17:08:21.172014 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerName="octavia-db-sync" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.172036 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerName="octavia-db-sync" Oct 03 17:08:21 crc kubenswrapper[5081]: E1003 17:08:21.172055 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerName="init" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.172063 5081 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerName="init" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.172249 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" containerName="octavia-db-sync" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.173310 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.176104 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.176286 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.184199 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-5r6rb"] Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316099 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-hm-ports\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316182 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data-merged\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316242 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316310 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-combined-ca-bundle\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316337 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-amphora-certs\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.316417 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-scripts\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.417973 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-combined-ca-bundle\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.418032 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-amphora-certs\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.418141 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-scripts\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.418224 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-hm-ports\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.418274 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data-merged\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.418322 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.419916 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data-merged\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.420460 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-hm-ports\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.424302 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-amphora-certs\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.424713 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-config-data\") pod \"octavia-housekeeping-5r6rb\" (UID: 
\"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.425238 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-combined-ca-bundle\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.438766 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f-scripts\") pod \"octavia-housekeeping-5r6rb\" (UID: \"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f\") " pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:21 crc kubenswrapper[5081]: I1003 17:08:21.495351 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:22 crc kubenswrapper[5081]: I1003 17:08:22.189685 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-5r6rb"] Oct 03 17:08:22 crc kubenswrapper[5081]: I1003 17:08:22.233013 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-5r6rb" event={"ID":"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f","Type":"ContainerStarted","Data":"1cf8d5f357392c3c4f230c2c6c0ba207d2013eb39eeea15e5bacd827382ab20b"} Oct 03 17:08:22 crc kubenswrapper[5081]: I1003 17:08:22.236727 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerStarted","Data":"4b3aa35afe28889c5c016847339e6d28b2182f297b3a80f088360d94d44bd30a"} Oct 03 17:08:22 crc kubenswrapper[5081]: I1003 17:08:22.265677 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-678599687f-4tpbx" podStartSLOduration=5.094113611 podStartE2EDuration="14.265654121s" podCreationTimestamp="2025-10-03 17:08:08 +0000 UTC" firstStartedPulling="2025-10-03 17:08:09.681976694 +0000 UTC m=+6008.647533307" lastFinishedPulling="2025-10-03 17:08:18.853517204 +0000 UTC m=+6017.819073817" observedRunningTime="2025-10-03 17:08:22.26106957 +0000 UTC m=+6021.226626183" watchObservedRunningTime="2025-10-03 17:08:22.265654121 +0000 UTC m=+6021.231210744" Oct 03 17:08:24 crc kubenswrapper[5081]: I1003 17:08:24.255351 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-xxbmw" event={"ID":"faef3e0c-bc19-4475-9c3b-fb8aea539120","Type":"ContainerStarted","Data":"997c9aee6dfaae84160a49d95b567ea022802292ee8f056687630dd50d49d91a"} Oct 03 17:08:25 crc kubenswrapper[5081]: I1003 17:08:25.267209 5081 generic.go:334] "Generic (PLEG): container finished" podID="faef3e0c-bc19-4475-9c3b-fb8aea539120" containerID="997c9aee6dfaae84160a49d95b567ea022802292ee8f056687630dd50d49d91a" exitCode=0 Oct 03 17:08:25 crc kubenswrapper[5081]: I1003 17:08:25.267436 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-xxbmw" event={"ID":"faef3e0c-bc19-4475-9c3b-fb8aea539120","Type":"ContainerDied","Data":"997c9aee6dfaae84160a49d95b567ea022802292ee8f056687630dd50d49d91a"} Oct 03 17:08:25 crc kubenswrapper[5081]: I1003 17:08:25.269718 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-5r6rb" 
event={"ID":"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f","Type":"ContainerStarted","Data":"2dd94bd317b1a3888390db98343ce44f244fc2e9b751e457800ff75c0c776048"} Oct 03 17:08:26 crc kubenswrapper[5081]: I1003 17:08:26.281055 5081 generic.go:334] "Generic (PLEG): container finished" podID="9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f" containerID="2dd94bd317b1a3888390db98343ce44f244fc2e9b751e457800ff75c0c776048" exitCode=0 Oct 03 17:08:26 crc kubenswrapper[5081]: I1003 17:08:26.281118 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-5r6rb" event={"ID":"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f","Type":"ContainerDied","Data":"2dd94bd317b1a3888390db98343ce44f244fc2e9b751e457800ff75c0c776048"} Oct 03 17:08:26 crc kubenswrapper[5081]: I1003 17:08:26.284829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-xxbmw" event={"ID":"faef3e0c-bc19-4475-9c3b-fb8aea539120","Type":"ContainerStarted","Data":"5c499746449425fc419fbffe7a7a871f8ef1dfcdc5cfcaf701bd453823eac19f"} Oct 03 17:08:26 crc kubenswrapper[5081]: I1003 17:08:26.285001 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-xxbmw" Oct 03 17:08:26 crc kubenswrapper[5081]: I1003 17:08:26.329497 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-xxbmw" podStartSLOduration=5.784669951 podStartE2EDuration="9.329441875s" podCreationTimestamp="2025-10-03 17:08:17 +0000 UTC" firstStartedPulling="2025-10-03 17:08:19.477223435 +0000 UTC m=+6018.442780048" lastFinishedPulling="2025-10-03 17:08:23.021995359 +0000 UTC m=+6021.987551972" observedRunningTime="2025-10-03 17:08:26.321225489 +0000 UTC m=+6025.286782102" watchObservedRunningTime="2025-10-03 17:08:26.329441875 +0000 UTC m=+6025.294998488" Oct 03 17:08:27 crc kubenswrapper[5081]: I1003 17:08:27.303255 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-5r6rb" event={"ID":"9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f","Type":"ContainerStarted","Data":"505abdf2634d83c1e2a6da62f1bc806b3f6124baded3010fe1e93cc15450fe6c"} Oct 03 17:08:27 crc kubenswrapper[5081]: I1003 17:08:27.304532 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:27 crc kubenswrapper[5081]: I1003 17:08:27.322698 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-5r6rb" podStartSLOduration=4.259897798 podStartE2EDuration="6.322677438s" podCreationTimestamp="2025-10-03 17:08:21 +0000 UTC" firstStartedPulling="2025-10-03 17:08:22.213256914 +0000 UTC m=+6021.178813527" lastFinishedPulling="2025-10-03 17:08:24.276036554 +0000 UTC m=+6023.241593167" observedRunningTime="2025-10-03 17:08:27.32137646 +0000 UTC m=+6026.286933103" watchObservedRunningTime="2025-10-03 17:08:27.322677438 +0000 UTC m=+6026.288234051" Oct 03 17:08:32 crc kubenswrapper[5081]: I1003 17:08:32.853042 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-xxbmw" Oct 03 17:08:36 crc kubenswrapper[5081]: I1003 17:08:36.528354 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-5r6rb" Oct 03 17:08:38 crc kubenswrapper[5081]: I1003 17:08:38.241625 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-c9kjr" Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.223652 5081 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.224378 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-678599687f-4tpbx" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="octavia-amphora-httpd" containerID="cri-o://4b3aa35afe28889c5c016847339e6d28b2182f297b3a80f088360d94d44bd30a" gracePeriod=30 Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.478753 5081 generic.go:334] "Generic (PLEG): container finished" podID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerID="4b3aa35afe28889c5c016847339e6d28b2182f297b3a80f088360d94d44bd30a" exitCode=0 Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.478807 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerDied","Data":"4b3aa35afe28889c5c016847339e6d28b2182f297b3a80f088360d94d44bd30a"} Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.714211 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.800349 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image\") pod \"1fe8bc5a-8530-4559-9853-b1283f40d51e\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.800455 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config\") pod \"1fe8bc5a-8530-4559-9853-b1283f40d51e\" (UID: \"1fe8bc5a-8530-4559-9853-b1283f40d51e\") " Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.857625 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "1fe8bc5a-8530-4559-9853-b1283f40d51e" (UID: "1fe8bc5a-8530-4559-9853-b1283f40d51e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.892329 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "1fe8bc5a-8530-4559-9853-b1283f40d51e" (UID: "1fe8bc5a-8530-4559-9853-b1283f40d51e"). InnerVolumeSpecName "amphora-image". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.909009 5081 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/1fe8bc5a-8530-4559-9853-b1283f40d51e-amphora-image\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:44 crc kubenswrapper[5081]: I1003 17:08:44.909044 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1fe8bc5a-8530-4559-9853-b1283f40d51e-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.490762 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-4tpbx" event={"ID":"1fe8bc5a-8530-4559-9853-b1283f40d51e","Type":"ContainerDied","Data":"a5002688e80e6b2d39b9e21f5820f4bf0faf3a635a40b00d39f8086268e57f49"} Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.491120 5081 scope.go:117] "RemoveContainer" containerID="4b3aa35afe28889c5c016847339e6d28b2182f297b3a80f088360d94d44bd30a" Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.491054 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-678599687f-4tpbx" Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.526035 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.526771 5081 scope.go:117] "RemoveContainer" containerID="6f5fb92857d6af54e199b5fa5db598226231b603692a66dea02714aab4483a09" Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.534776 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-678599687f-4tpbx"] Oct 03 17:08:45 crc kubenswrapper[5081]: I1003 17:08:45.841037 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" path="/var/lib/kubelet/pods/1fe8bc5a-8530-4559-9853-b1283f40d51e/volumes" Oct 03 17:08:49 crc kubenswrapper[5081]: I1003 17:08:49.999226 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-678599687f-c5qjv"] Oct 03 17:08:50 crc kubenswrapper[5081]: E1003 17:08:50.000249 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="init" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.000265 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="init" Oct 03 17:08:50 crc kubenswrapper[5081]: E1003 17:08:50.000299 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="octavia-amphora-httpd" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.000307 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="octavia-amphora-httpd" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.000671 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fe8bc5a-8530-4559-9853-b1283f40d51e" containerName="octavia-amphora-httpd" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.001883 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.005062 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.019772 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-c5qjv"] Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.077161 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/6ee77da4-e567-42df-b018-d5c1a2bb0c59-amphora-image\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.077280 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ee77da4-e567-42df-b018-d5c1a2bb0c59-httpd-config\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.179087 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/6ee77da4-e567-42df-b018-d5c1a2bb0c59-amphora-image\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.179159 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ee77da4-e567-42df-b018-d5c1a2bb0c59-httpd-config\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.179537 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/6ee77da4-e567-42df-b018-d5c1a2bb0c59-amphora-image\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.195263 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ee77da4-e567-42df-b018-d5c1a2bb0c59-httpd-config\") pod \"octavia-image-upload-678599687f-c5qjv\" (UID: \"6ee77da4-e567-42df-b018-d5c1a2bb0c59\") " pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.350811 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-c5qjv" Oct 03 17:08:50 crc kubenswrapper[5081]: I1003 17:08:50.808158 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-c5qjv"] Oct 03 17:08:51 crc kubenswrapper[5081]: I1003 17:08:51.546781 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-c5qjv" event={"ID":"6ee77da4-e567-42df-b018-d5c1a2bb0c59","Type":"ContainerStarted","Data":"d4f4c9b4348ad7f216b167bd985647460d1f5047429fefc700c8ddc050c2f2e7"} Oct 03 17:08:51 crc kubenswrapper[5081]: I1003 17:08:51.547235 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-c5qjv" event={"ID":"6ee77da4-e567-42df-b018-d5c1a2bb0c59","Type":"ContainerStarted","Data":"05c555d7527f20757e2a00410eb735bd4ccb36bfb7ec0bdb6c9a0e930089bff2"} Oct 03 17:08:52 crc kubenswrapper[5081]: I1003 17:08:52.032987 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-vfkpg"] Oct 03 17:08:52 crc kubenswrapper[5081]: I1003 17:08:52.042323 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-vfkpg"] Oct 03 17:08:52 crc kubenswrapper[5081]: I1003 17:08:52.556639 5081 generic.go:334] "Generic (PLEG): container finished" podID="6ee77da4-e567-42df-b018-d5c1a2bb0c59" containerID="d4f4c9b4348ad7f216b167bd985647460d1f5047429fefc700c8ddc050c2f2e7" exitCode=0 Oct 03 17:08:52 crc kubenswrapper[5081]: I1003 17:08:52.556679 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-c5qjv" event={"ID":"6ee77da4-e567-42df-b018-d5c1a2bb0c59","Type":"ContainerDied","Data":"d4f4c9b4348ad7f216b167bd985647460d1f5047429fefc700c8ddc050c2f2e7"} Oct 03 17:08:53 crc kubenswrapper[5081]: I1003 17:08:53.566315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-c5qjv" event={"ID":"6ee77da4-e567-42df-b018-d5c1a2bb0c59","Type":"ContainerStarted","Data":"519b8b8e02ce5652b339374f4ce77e34148695eef9f9dac76f71e35f2d66d005"} Oct 03 17:08:53 crc kubenswrapper[5081]: I1003 17:08:53.599437 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-678599687f-c5qjv" podStartSLOduration=4.145205566 podStartE2EDuration="4.599418222s" podCreationTimestamp="2025-10-03 17:08:49 +0000 UTC" firstStartedPulling="2025-10-03 17:08:50.816372582 +0000 UTC m=+6049.781929195" lastFinishedPulling="2025-10-03 17:08:51.270585238 +0000 UTC m=+6050.236141851" observedRunningTime="2025-10-03 17:08:53.581804846 +0000 UTC m=+6052.547361469" watchObservedRunningTime="2025-10-03 17:08:53.599418222 +0000 UTC m=+6052.564974835" Oct 03 17:08:53 crc kubenswrapper[5081]: I1003 17:08:53.839078 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="218e531e-e3bb-4690-a0c9-31a6a56c01ac" path="/var/lib/kubelet/pods/218e531e-e3bb-4690-a0c9-31a6a56c01ac/volumes" Oct 03 17:09:02 crc kubenswrapper[5081]: I1003 17:09:02.037587 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6a43-account-create-dcnxj"] Oct 03 17:09:02 crc kubenswrapper[5081]: I1003 17:09:02.046574 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6a43-account-create-dcnxj"] Oct 03 17:09:03 crc kubenswrapper[5081]: I1003 17:09:03.838303 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e888ae0e-fada-4076-9f60-c20eaf243332" 
path="/var/lib/kubelet/pods/e888ae0e-fada-4076-9f60-c20eaf243332/volumes" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.032369 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-pws7r"] Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.041253 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-pws7r"] Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.802175 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-sjtg8"] Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.804362 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.806573 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.806918 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.818284 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-sjtg8"] Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.842580 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c245e9-2377-4ab8-9048-41bde14b0d68" path="/var/lib/kubelet/pods/10c245e9-2377-4ab8-9048-41bde14b0d68/volumes" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.939713 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-amphora-certs\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.939768 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-combined-ca-bundle\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.939842 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data-merged\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.939891 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6d2de904-d28d-44cf-95ba-1be8e12f2699-hm-ports\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc kubenswrapper[5081]: I1003 17:09:09.939959 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8" Oct 03 17:09:09 crc 
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041681 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-amphora-certs\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041730 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-combined-ca-bundle\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data-merged\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041804 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6d2de904-d28d-44cf-95ba-1be8e12f2699-hm-ports\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041847 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.041884 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-scripts\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.043608 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data-merged\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.043787 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6d2de904-d28d-44cf-95ba-1be8e12f2699-hm-ports\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.048429 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-amphora-certs\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.048689 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-scripts\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.049479 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-config-data\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.050622 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2de904-d28d-44cf-95ba-1be8e12f2699-combined-ca-bundle\") pod \"octavia-healthmanager-sjtg8\" (UID: \"6d2de904-d28d-44cf-95ba-1be8e12f2699\") " pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.131211 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.710764 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-sjtg8"]
Oct 03 17:09:10 crc kubenswrapper[5081]: I1003 17:09:10.726605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-sjtg8" event={"ID":"6d2de904-d28d-44cf-95ba-1be8e12f2699","Type":"ContainerStarted","Data":"365ec0ee4a59f8e1ca3ac1629e76943a4fc5b0505b11d114d7fb77abea598fba"}
Oct 03 17:09:11 crc kubenswrapper[5081]: I1003 17:09:11.737366 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-sjtg8" event={"ID":"6d2de904-d28d-44cf-95ba-1be8e12f2699","Type":"ContainerStarted","Data":"e1d50e29fc360d9fca9560dd3ad03fc003a4687d1b9185e3721b1cf2d2fc4c62"}
Oct 03 17:09:13 crc kubenswrapper[5081]: I1003 17:09:13.755958 5081 generic.go:334] "Generic (PLEG): container finished" podID="6d2de904-d28d-44cf-95ba-1be8e12f2699" containerID="e1d50e29fc360d9fca9560dd3ad03fc003a4687d1b9185e3721b1cf2d2fc4c62" exitCode=0
Oct 03 17:09:13 crc kubenswrapper[5081]: I1003 17:09:13.756069 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-sjtg8" event={"ID":"6d2de904-d28d-44cf-95ba-1be8e12f2699","Type":"ContainerDied","Data":"e1d50e29fc360d9fca9560dd3ad03fc003a4687d1b9185e3721b1cf2d2fc4c62"}
Oct 03 17:09:14 crc kubenswrapper[5081]: I1003 17:09:14.768321 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-sjtg8" event={"ID":"6d2de904-d28d-44cf-95ba-1be8e12f2699","Type":"ContainerStarted","Data":"2564c0a87b42d3db90d99882fcb4c6ef93a9dab21548e3f4f0e2e1ee36200e33"}
Oct 03 17:09:14 crc kubenswrapper[5081]: I1003 17:09:14.768882 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:14 crc kubenswrapper[5081]: I1003 17:09:14.794337 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-sjtg8" podStartSLOduration=5.794318497 podStartE2EDuration="5.794318497s" podCreationTimestamp="2025-10-03 17:09:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:09:14.79198611 +0000 UTC m=+6073.757542733" watchObservedRunningTime="2025-10-03 17:09:14.794318497 +0000 UTC m=+6073.759875130"
Oct 03 17:09:23 crc kubenswrapper[5081]: I1003 17:09:23.755808 5081 scope.go:117] "RemoveContainer" containerID="a42cb1ef112f1313e16b17cfc3b03f8475dfcdc4853e99f0c8655266d4e6649e"
Oct 03 17:09:23 crc kubenswrapper[5081]: I1003 17:09:23.781605 5081 scope.go:117] "RemoveContainer" containerID="b99cb7ec76ba8a5eb7f9863072a3efe8a5184fbc204101282ac97f3a34c33d61"
Oct 03 17:09:23 crc kubenswrapper[5081]: I1003 17:09:23.830138 5081 scope.go:117] "RemoveContainer" containerID="68ea8064f8b3f2007f1e817741ac213a8089a36cf9c798fd7fc871e1d8318c60"
Oct 03 17:09:25 crc kubenswrapper[5081]: I1003 17:09:25.161240 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-sjtg8"
Oct 03 17:09:35 crc kubenswrapper[5081]: I1003 17:09:35.038091 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-25czj"]
Oct 03 17:09:35 crc kubenswrapper[5081]: I1003 17:09:35.047944 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-25czj"]
Oct 03 17:09:35 crc kubenswrapper[5081]: I1003 17:09:35.840494 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f193389-e587-4337-ae08-a3ec10e49ff6" path="/var/lib/kubelet/pods/2f193389-e587-4337-ae08-a3ec10e49ff6/volumes"
Oct 03 17:09:45 crc kubenswrapper[5081]: I1003 17:09:45.040265 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7194-account-create-5pft9"]
Oct 03 17:09:45 crc kubenswrapper[5081]: I1003 17:09:45.049104 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7194-account-create-5pft9"]
Oct 03 17:09:45 crc kubenswrapper[5081]: I1003 17:09:45.840161 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92f050c3-8dc0-46b8-b399-a84d91b31398" path="/var/lib/kubelet/pods/92f050c3-8dc0-46b8-b399-a84d91b31398/volumes"
Oct 03 17:09:55 crc kubenswrapper[5081]: I1003 17:09:55.043305 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-zrzdr"]
Oct 03 17:09:55 crc kubenswrapper[5081]: I1003 17:09:55.052861 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-zrzdr"]
Oct 03 17:09:55 crc kubenswrapper[5081]: I1003 17:09:55.839872 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0adfd05f-b2bb-4e10-b343-54fb084d6e73" path="/var/lib/kubelet/pods/0adfd05f-b2bb-4e10-b343-54fb084d6e73/volumes"
Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.026782 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"]
Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.030599 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-fdd9b66bf-qkn6b"
Need to start a new one" pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.036146 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.036349 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-kpm5f" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.036483 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.036640 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.060118 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.109223 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.109491 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-log" containerID="cri-o://1bcca2f2a9bb1086f916a6cb1c1c1701092af01c703518c1fd720876f7f11fde" gracePeriod=30 Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.109532 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-httpd" containerID="cri-o://8563384337f9d0433ee70252cdf3eafa306f54b7a3ad9ba320231c6a25526852" gracePeriod=30 Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.157429 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.162414 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.172402 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.188280 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.188443 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2lt7\" (UniqueName: \"kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.188502 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.188543 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.188782 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.231762 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.232256 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-log" containerID="cri-o://acdc661f2e244c0c065a5bc97bdb9db20f5c485759f7b2486ab3cae7b7456a2a" gracePeriod=30 Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.232756 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-httpd" containerID="cri-o://628a28388f28676e53095920db723214ae442bb97698a0b3fb656802895ecd19" gracePeriod=30 Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290116 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290268 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290305 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290334 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290382 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290439 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2lt7\" (UniqueName: \"kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpfc2\" (UniqueName: \"kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290489 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290527 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.290580 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.291184 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.291280 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.292277 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.295888 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.307343 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2lt7\" (UniqueName: \"kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7\") pod \"horizon-fdd9b66bf-qkn6b\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.334041 5081 generic.go:334] "Generic (PLEG): container finished" podID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerID="1bcca2f2a9bb1086f916a6cb1c1c1701092af01c703518c1fd720876f7f11fde" exitCode=143 Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.334085 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerDied","Data":"1bcca2f2a9bb1086f916a6cb1c1c1701092af01c703518c1fd720876f7f11fde"} Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.371110 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.398593 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.398710 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.398817 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpfc2\" (UniqueName: \"kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.398847 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.398982 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.399959 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.400603 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.401604 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.405423 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.426023 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hpfc2\" (UniqueName: \"kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2\") pod \"horizon-77d89b49f7-wtlz7\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.493943 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.704361 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.745750 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.755393 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.788332 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.919308 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.919480 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.919575 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.919597 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghkgt\" (UniqueName: \"kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:15 crc kubenswrapper[5081]: I1003 17:10:15.919618 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.021303 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"] Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.021770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") 
" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.021909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.021977 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.021998 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghkgt\" (UniqueName: \"kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.022109 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.022866 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.023739 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.024239 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.028512 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.030682 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.041544 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghkgt\" (UniqueName: \"kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt\") pod \"horizon-89cbfd487-5hdbz\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc 
kubenswrapper[5081]: I1003 17:10:16.143263 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:16 crc kubenswrapper[5081]: W1003 17:10:16.185376 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fdad3f4_cb8a_4632_9469_8dad640c88f5.slice/crio-d8198dfa3cf1d64e13af604f6226c59fb7551e8ba1d8eb92e17bdb517ff1049b WatchSource:0}: Error finding container d8198dfa3cf1d64e13af604f6226c59fb7551e8ba1d8eb92e17bdb517ff1049b: Status 404 returned error can't find the container with id d8198dfa3cf1d64e13af604f6226c59fb7551e8ba1d8eb92e17bdb517ff1049b Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.186551 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.350486 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerStarted","Data":"d8198dfa3cf1d64e13af604f6226c59fb7551e8ba1d8eb92e17bdb517ff1049b"} Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.353301 5081 generic.go:334] "Generic (PLEG): container finished" podID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerID="acdc661f2e244c0c065a5bc97bdb9db20f5c485759f7b2486ab3cae7b7456a2a" exitCode=143 Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.353396 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerDied","Data":"acdc661f2e244c0c065a5bc97bdb9db20f5c485759f7b2486ab3cae7b7456a2a"} Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.354765 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerStarted","Data":"f2361354f120e8e4ce635034e63e0fe61e7be19fbdf353ed19c7170c02f30344"} Oct 03 17:10:16 crc kubenswrapper[5081]: I1003 17:10:16.594425 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:10:16 crc kubenswrapper[5081]: W1003 17:10:16.599522 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5692d3e9_857c_42bb_acbc_68142e710415.slice/crio-ccd0635724c231b8451a0ebbc0f731a09d48561e8bebd2b52a48fe544c6e00e5 WatchSource:0}: Error finding container ccd0635724c231b8451a0ebbc0f731a09d48561e8bebd2b52a48fe544c6e00e5: Status 404 returned error can't find the container with id ccd0635724c231b8451a0ebbc0f731a09d48561e8bebd2b52a48fe544c6e00e5 Oct 03 17:10:17 crc kubenswrapper[5081]: I1003 17:10:17.366547 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerStarted","Data":"ccd0635724c231b8451a0ebbc0f731a09d48561e8bebd2b52a48fe544c6e00e5"} Oct 03 17:10:18 crc kubenswrapper[5081]: I1003 17:10:18.381137 5081 generic.go:334] "Generic (PLEG): container finished" podID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerID="8563384337f9d0433ee70252cdf3eafa306f54b7a3ad9ba320231c6a25526852" exitCode=0 Oct 03 17:10:18 crc kubenswrapper[5081]: I1003 17:10:18.381227 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerDied","Data":"8563384337f9d0433ee70252cdf3eafa306f54b7a3ad9ba320231c6a25526852"} Oct 03 17:10:19 crc kubenswrapper[5081]: I1003 17:10:19.391335 5081 generic.go:334] "Generic (PLEG): container finished" podID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerID="628a28388f28676e53095920db723214ae442bb97698a0b3fb656802895ecd19" exitCode=0 Oct 03 17:10:19 crc kubenswrapper[5081]: I1003 17:10:19.391382 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerDied","Data":"628a28388f28676e53095920db723214ae442bb97698a0b3fb656802895ecd19"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.027014 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094423 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shcfb\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094482 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094588 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094618 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094663 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094729 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.094885 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle\") pod \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\" (UID: \"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd\") " Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.098215 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run" 
(OuterVolumeSpecName: "httpd-run") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.098440 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs" (OuterVolumeSpecName: "logs") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.101829 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb" (OuterVolumeSpecName: "kube-api-access-shcfb") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "kube-api-access-shcfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.113857 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph" (OuterVolumeSpecName: "ceph") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.113966 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts" (OuterVolumeSpecName: "scripts") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.151863 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.161851 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data" (OuterVolumeSpecName: "config-data") pod "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" (UID: "4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208211 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208244 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208256 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208267 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208279 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208291 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shcfb\" (UniqueName: \"kubernetes.io/projected/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-kube-api-access-shcfb\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.208302 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.438315 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd","Type":"ContainerDied","Data":"315de9083df5fba301c1adaa662e561cfd934cf05345f22e6d3e675b0bba08b9"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.438389 5081 scope.go:117] "RemoveContainer" containerID="628a28388f28676e53095920db723214ae442bb97698a0b3fb656802895ecd19" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.438443 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.449979 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerStarted","Data":"1f6700a73c927a287d9702cafb175792c22e14e4023b038774dbcca8cfa16946"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.450036 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerStarted","Data":"f1e67e11cb2083a24dc47a97cc4dbdc3f57c172c484a755f58ac6757f43d61ab"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.450240 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-fdd9b66bf-qkn6b" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon-log" containerID="cri-o://f1e67e11cb2083a24dc47a97cc4dbdc3f57c172c484a755f58ac6757f43d61ab" gracePeriod=30 Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.450294 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-fdd9b66bf-qkn6b" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon" containerID="cri-o://1f6700a73c927a287d9702cafb175792c22e14e4023b038774dbcca8cfa16946" gracePeriod=30 Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.453122 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerStarted","Data":"d65accdacb0fc8da7399ac70c0431da583b8ed4b4339982df558a82a2174a0f9"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.453148 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerStarted","Data":"244e9d2b203d71adb49760c4948166a58535d5be74f4db2a4062233851374648"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.460166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerStarted","Data":"3a08d02a5e376a0adc4ff9f0f845b5b9e2b015fa4c86b3a0348db648c4cb14e0"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.460196 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerStarted","Data":"acee74271c0f74ffc9080306c650e07cad38a254d5554a8f7bbbf5e5ae1f4aa2"} Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.480825 5081 scope.go:117] "RemoveContainer" containerID="acdc661f2e244c0c065a5bc97bdb9db20f5c485759f7b2486ab3cae7b7456a2a" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.481861 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-fdd9b66bf-qkn6b" podStartSLOduration=1.8515141210000001 podStartE2EDuration="8.480564289s" podCreationTimestamp="2025-10-03 17:10:15 +0000 UTC" firstStartedPulling="2025-10-03 17:10:16.030485332 +0000 UTC m=+6134.996041935" lastFinishedPulling="2025-10-03 17:10:22.65953549 +0000 UTC m=+6141.625092103" observedRunningTime="2025-10-03 17:10:23.469899212 +0000 UTC m=+6142.435455835" watchObservedRunningTime="2025-10-03 17:10:23.480564289 +0000 UTC m=+6142.446120902" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.490253 5081 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/horizon-77d89b49f7-wtlz7" podStartSLOduration=2.026957549 podStartE2EDuration="8.490242458s" podCreationTimestamp="2025-10-03 17:10:15 +0000 UTC" firstStartedPulling="2025-10-03 17:10:16.18961483 +0000 UTC m=+6135.155171433" lastFinishedPulling="2025-10-03 17:10:22.652899729 +0000 UTC m=+6141.618456342" observedRunningTime="2025-10-03 17:10:23.489803375 +0000 UTC m=+6142.455359988" watchObservedRunningTime="2025-10-03 17:10:23.490242458 +0000 UTC m=+6142.455799071" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.530670 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.594485 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.618921 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:23 crc kubenswrapper[5081]: E1003 17:10:23.619701 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-log" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.619727 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-log" Oct 03 17:10:23 crc kubenswrapper[5081]: E1003 17:10:23.619777 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-httpd" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.619784 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-httpd" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.622937 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-log" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.622968 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" containerName="glance-httpd" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.627231 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.631473 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.643362 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-89cbfd487-5hdbz" podStartSLOduration=2.545207007 podStartE2EDuration="8.643341542s" podCreationTimestamp="2025-10-03 17:10:15 +0000 UTC" firstStartedPulling="2025-10-03 17:10:16.602619831 +0000 UTC m=+6135.568176444" lastFinishedPulling="2025-10-03 17:10:22.700754366 +0000 UTC m=+6141.666310979" observedRunningTime="2025-10-03 17:10:23.535983043 +0000 UTC m=+6142.501539666" watchObservedRunningTime="2025-10-03 17:10:23.643341542 +0000 UTC m=+6142.608898155" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.692228 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-logs\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732431 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzxpm\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-kube-api-access-zzxpm\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732492 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-config-data\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732537 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-ceph\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732585 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-scripts\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732723 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.732828 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834225 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834322 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-logs\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834376 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzxpm\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-kube-api-access-zzxpm\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834395 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-config-data\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834412 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-ceph\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834429 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-scripts\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.834466 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.838660 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.838999 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-logs\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.839688 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.844059 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-ceph\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.844514 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-scripts\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.844658 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd" path="/var/lib/kubelet/pods/4a43ee1d-14ae-45e8-a9cf-dc7e8199fccd/volumes" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.848379 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-config-data\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.857926 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzxpm\" (UniqueName: \"kubernetes.io/projected/3a080e0a-e9a0-41a5-b0e1-600b6e4d854a-kube-api-access-zzxpm\") pod \"glance-default-external-api-0\" (UID: \"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a\") " pod="openstack/glance-default-external-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.934460 5081 scope.go:117] "RemoveContainer" containerID="cf2587229e62133cd001c332432fccd4d5f671c55b45a6302353e75452b7ee4f" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.948795 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:23 crc kubenswrapper[5081]: I1003 17:10:23.951587 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:23.996411 5081 scope.go:117] "RemoveContainer" containerID="536c583b39cea92bd745c9a883de4dd50d0725adcb64a94c39d26664abf58dc1" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.037604 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfzlr\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.037665 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.037765 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.037897 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.038011 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.038052 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.038075 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts\") pod \"0cbd888b-86fb-4803-ae3a-65361d9eec55\" (UID: \"0cbd888b-86fb-4803-ae3a-65361d9eec55\") " Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.038466 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.038665 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs" (OuterVolumeSpecName: "logs") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.040683 5081 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.040713 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cbd888b-86fb-4803-ae3a-65361d9eec55-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.046057 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr" (OuterVolumeSpecName: "kube-api-access-qfzlr") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "kube-api-access-qfzlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.047568 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts" (OuterVolumeSpecName: "scripts") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.048958 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph" (OuterVolumeSpecName: "ceph") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.092147 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.094763 5081 scope.go:117] "RemoveContainer" containerID="8563384337f9d0433ee70252cdf3eafa306f54b7a3ad9ba320231c6a25526852" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.130929 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data" (OuterVolumeSpecName: "config-data") pod "0cbd888b-86fb-4803-ae3a-65361d9eec55" (UID: "0cbd888b-86fb-4803-ae3a-65361d9eec55"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.137002 5081 scope.go:117] "RemoveContainer" containerID="107bb531a4cf8ac5743763378b7172869d708b68d103ab81d161b70d6abcec8b" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.149781 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.149812 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.149820 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.149830 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfzlr\" (UniqueName: \"kubernetes.io/projected/0cbd888b-86fb-4803-ae3a-65361d9eec55-kube-api-access-qfzlr\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.149840 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd888b-86fb-4803-ae3a-65361d9eec55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.210976 5081 scope.go:117] "RemoveContainer" containerID="ce9b86c284fcfd2f798dcab1b7c5d0e54151ae875251be0230d8c0c496ad2330" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.270647 5081 scope.go:117] "RemoveContainer" containerID="3f2dd09e49fc5b94d12155a63b28164fc2c7ea42db1878861b3709bdf466eb9b" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.336473 5081 scope.go:117] "RemoveContainer" containerID="1bcca2f2a9bb1086f916a6cb1c1c1701092af01c703518c1fd720876f7f11fde" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.468415 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0cbd888b-86fb-4803-ae3a-65361d9eec55","Type":"ContainerDied","Data":"f6ed16d31060087df6805d3431df6e33e3e171002b87e26e8c1618f732a8f961"} Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.473431 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.527262 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.541203 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.557585 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:24 crc kubenswrapper[5081]: E1003 17:10:24.575506 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-log" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.575532 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-log" Oct 03 17:10:24 crc kubenswrapper[5081]: E1003 17:10:24.575552 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-httpd" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.575561 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-httpd" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.575769 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-httpd" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.575791 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" containerName="glance-log" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.576938 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.583853 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.608037 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.663789 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfs2q\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-kube-api-access-qfs2q\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.663855 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.663871 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.663913 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.663947 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.664029 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-logs\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.664049 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765783 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-logs\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765829 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765859 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfs2q\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-kube-api-access-qfs2q\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765899 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765923 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765963 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.765997 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.767782 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-logs\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.769532 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7eb7b30-2efc-4e38-b64e-61ea399c7303-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.775705 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.780247 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.781250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7eb7b30-2efc-4e38-b64e-61ea399c7303-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.786284 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.798299 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfs2q\" (UniqueName: \"kubernetes.io/projected/e7eb7b30-2efc-4e38-b64e-61ea399c7303-kube-api-access-qfs2q\") pod \"glance-default-internal-api-0\" (UID: \"e7eb7b30-2efc-4e38-b64e-61ea399c7303\") " pod="openstack/glance-default-internal-api-0" Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.841411 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 17:10:24 crc kubenswrapper[5081]: I1003 17:10:24.903960 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.371922 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.494486 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.494585 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.498510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a","Type":"ContainerStarted","Data":"79975156490b8bdd6952f0e45a7514f792f10d9098d320e54396c69fe55f21e8"} Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.560534 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 17:10:25 crc kubenswrapper[5081]: I1003 17:10:25.839866 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbd888b-86fb-4803-ae3a-65361d9eec55" path="/var/lib/kubelet/pods/0cbd888b-86fb-4803-ae3a-65361d9eec55/volumes" Oct 03 17:10:26 crc kubenswrapper[5081]: I1003 17:10:26.143635 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:26 crc kubenswrapper[5081]: I1003 17:10:26.143688 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:26 crc kubenswrapper[5081]: I1003 17:10:26.529234 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"e7eb7b30-2efc-4e38-b64e-61ea399c7303","Type":"ContainerStarted","Data":"18647f1cbf1613ea3acd2e2b730655bbda042c069354d5fed504f5a59843647d"} Oct 03 17:10:26 crc kubenswrapper[5081]: I1003 17:10:26.529588 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e7eb7b30-2efc-4e38-b64e-61ea399c7303","Type":"ContainerStarted","Data":"bd811b0240526320f3c318023cacc854ad9b1c6cef657662bab4f4374003d91f"} Oct 03 17:10:26 crc kubenswrapper[5081]: I1003 17:10:26.535364 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a","Type":"ContainerStarted","Data":"36ccc3e66d30db7a2976c9aee928e6c5b15cbc2c8443f3b4b8ddb0be9ac99be6"} Oct 03 17:10:27 crc kubenswrapper[5081]: I1003 17:10:27.552149 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3a080e0a-e9a0-41a5-b0e1-600b6e4d854a","Type":"ContainerStarted","Data":"f855d4e6737258865be51db9f2d9da275cec40f11bb9a54bfb704b7591e20fda"} Oct 03 17:10:27 crc kubenswrapper[5081]: I1003 17:10:27.559462 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e7eb7b30-2efc-4e38-b64e-61ea399c7303","Type":"ContainerStarted","Data":"e69ecef6d480da6786869e04369b03d683c2c834e1d151b41d46f34a004320da"} Oct 03 17:10:27 crc kubenswrapper[5081]: I1003 17:10:27.589237 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.589218293 podStartE2EDuration="4.589218293s" podCreationTimestamp="2025-10-03 17:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:10:27.574412077 +0000 UTC m=+6146.539968710" watchObservedRunningTime="2025-10-03 17:10:27.589218293 +0000 UTC m=+6146.554774896" Oct 03 17:10:27 crc kubenswrapper[5081]: I1003 17:10:27.609819 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.609797165 podStartE2EDuration="3.609797165s" podCreationTimestamp="2025-10-03 17:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:10:27.597990655 +0000 UTC m=+6146.563547278" watchObservedRunningTime="2025-10-03 17:10:27.609797165 +0000 UTC m=+6146.575353798" Oct 03 17:10:30 crc kubenswrapper[5081]: I1003 17:10:30.647463 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:10:30 crc kubenswrapper[5081]: I1003 17:10:30.648296 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:10:33 crc kubenswrapper[5081]: I1003 17:10:33.952618 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 17:10:33 crc kubenswrapper[5081]: I1003 17:10:33.953239 
5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 17:10:33 crc kubenswrapper[5081]: I1003 17:10:33.987130 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.000282 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.647447 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.647535 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.905082 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.905532 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.948884 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:34 crc kubenswrapper[5081]: I1003 17:10:34.970184 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:35 crc kubenswrapper[5081]: I1003 17:10:35.498333 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d89b49f7-wtlz7" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Oct 03 17:10:35 crc kubenswrapper[5081]: I1003 17:10:35.656041 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:35 crc kubenswrapper[5081]: I1003 17:10:35.656089 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:36 crc kubenswrapper[5081]: I1003 17:10:36.147629 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-89cbfd487-5hdbz" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Oct 03 17:10:36 crc kubenswrapper[5081]: I1003 17:10:36.923683 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 17:10:36 crc kubenswrapper[5081]: I1003 17:10:36.923779 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.018071 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.060698 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-k4rrp"] Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.071234 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-k4rrp"] Oct 03 17:10:37 
crc kubenswrapper[5081]: I1003 17:10:37.795853 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.797189 5081 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.808702 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 17:10:37 crc kubenswrapper[5081]: I1003 17:10:37.841669 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06474819-0ce0-4b8d-9cf9-460b4c452103" path="/var/lib/kubelet/pods/06474819-0ce0-4b8d-9cf9-460b4c452103/volumes" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.430614 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.433330 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.450882 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.576736 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.577502 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.577899 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z4nj\" (UniqueName: \"kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.682087 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z4nj\" (UniqueName: \"kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.682153 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.682222 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.682884 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.683477 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.713229 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z4nj\" (UniqueName: \"kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj\") pod \"redhat-marketplace-pvkpx\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:38 crc kubenswrapper[5081]: I1003 17:10:38.760937 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:39 crc kubenswrapper[5081]: I1003 17:10:39.311851 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:39 crc kubenswrapper[5081]: I1003 17:10:39.696195 5081 generic.go:334] "Generic (PLEG): container finished" podID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerID="c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934" exitCode=0 Oct 03 17:10:39 crc kubenswrapper[5081]: I1003 17:10:39.696247 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerDied","Data":"c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934"} Oct 03 17:10:39 crc kubenswrapper[5081]: I1003 17:10:39.696463 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerStarted","Data":"773cbeffed9e18841cbc4205ec050ced8fcd9af4ee80e8432fefa7a294c18023"} Oct 03 17:10:41 crc kubenswrapper[5081]: I1003 17:10:41.718682 5081 generic.go:334] "Generic (PLEG): container finished" podID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerID="a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f" exitCode=0 Oct 03 17:10:41 crc kubenswrapper[5081]: I1003 17:10:41.718746 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerDied","Data":"a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f"} Oct 03 17:10:42 crc kubenswrapper[5081]: I1003 17:10:42.750510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerStarted","Data":"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993"} Oct 03 
17:10:42 crc kubenswrapper[5081]: I1003 17:10:42.778146 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pvkpx" podStartSLOduration=2.242261594 podStartE2EDuration="4.778101635s" podCreationTimestamp="2025-10-03 17:10:38 +0000 UTC" firstStartedPulling="2025-10-03 17:10:39.698181273 +0000 UTC m=+6158.663737886" lastFinishedPulling="2025-10-03 17:10:42.234021314 +0000 UTC m=+6161.199577927" observedRunningTime="2025-10-03 17:10:42.771189058 +0000 UTC m=+6161.736745671" watchObservedRunningTime="2025-10-03 17:10:42.778101635 +0000 UTC m=+6161.743658268" Oct 03 17:10:47 crc kubenswrapper[5081]: I1003 17:10:47.060837 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5bb4-account-create-btpfq"] Oct 03 17:10:47 crc kubenswrapper[5081]: I1003 17:10:47.071417 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5bb4-account-create-btpfq"] Oct 03 17:10:47 crc kubenswrapper[5081]: I1003 17:10:47.347656 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:47 crc kubenswrapper[5081]: I1003 17:10:47.842238 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f46a0c4-ea57-4029-8248-8324ce4bcac3" path="/var/lib/kubelet/pods/1f46a0c4-ea57-4029-8248-8324ce4bcac3/volumes" Oct 03 17:10:47 crc kubenswrapper[5081]: I1003 17:10:47.988137 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:48 crc kubenswrapper[5081]: I1003 17:10:48.761511 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:48 crc kubenswrapper[5081]: I1003 17:10:48.761548 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:48 crc kubenswrapper[5081]: I1003 17:10:48.820914 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:48 crc kubenswrapper[5081]: I1003 17:10:48.872181 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.066487 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.066897 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.689506 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.750738 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.813115 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-77d89b49f7-wtlz7" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon-log" containerID="cri-o://acee74271c0f74ffc9080306c650e07cad38a254d5554a8f7bbbf5e5ae1f4aa2" gracePeriod=30 Oct 03 17:10:49 crc kubenswrapper[5081]: I1003 17:10:49.813289 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-77d89b49f7-wtlz7" 
podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" containerID="cri-o://3a08d02a5e376a0adc4ff9f0f845b5b9e2b015fa4c86b3a0348db648c4cb14e0" gracePeriod=30 Oct 03 17:10:50 crc kubenswrapper[5081]: I1003 17:10:50.821072 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pvkpx" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="registry-server" containerID="cri-o://f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993" gracePeriod=2 Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.364305 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.457397 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content\") pod \"ac277915-b90c-4dfc-a51a-4a5d885398bf\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.457467 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities\") pod \"ac277915-b90c-4dfc-a51a-4a5d885398bf\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.457742 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z4nj\" (UniqueName: \"kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj\") pod \"ac277915-b90c-4dfc-a51a-4a5d885398bf\" (UID: \"ac277915-b90c-4dfc-a51a-4a5d885398bf\") " Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.458339 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities" (OuterVolumeSpecName: "utilities") pod "ac277915-b90c-4dfc-a51a-4a5d885398bf" (UID: "ac277915-b90c-4dfc-a51a-4a5d885398bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.463377 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj" (OuterVolumeSpecName: "kube-api-access-2z4nj") pod "ac277915-b90c-4dfc-a51a-4a5d885398bf" (UID: "ac277915-b90c-4dfc-a51a-4a5d885398bf"). InnerVolumeSpecName "kube-api-access-2z4nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.468442 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac277915-b90c-4dfc-a51a-4a5d885398bf" (UID: "ac277915-b90c-4dfc-a51a-4a5d885398bf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.559735 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.559774 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac277915-b90c-4dfc-a51a-4a5d885398bf-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.559783 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z4nj\" (UniqueName: \"kubernetes.io/projected/ac277915-b90c-4dfc-a51a-4a5d885398bf-kube-api-access-2z4nj\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.833017 5081 generic.go:334] "Generic (PLEG): container finished" podID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerID="f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993" exitCode=0 Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.835491 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pvkpx" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.839826 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerDied","Data":"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993"} Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.839873 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pvkpx" event={"ID":"ac277915-b90c-4dfc-a51a-4a5d885398bf","Type":"ContainerDied","Data":"773cbeffed9e18841cbc4205ec050ced8fcd9af4ee80e8432fefa7a294c18023"} Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.839891 5081 scope.go:117] "RemoveContainer" containerID="f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.863091 5081 scope.go:117] "RemoveContainer" containerID="a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.878847 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.886763 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pvkpx"] Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.887993 5081 scope.go:117] "RemoveContainer" containerID="c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.944385 5081 scope.go:117] "RemoveContainer" containerID="f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993" Oct 03 17:10:51 crc kubenswrapper[5081]: E1003 17:10:51.944852 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993\": container with ID starting with f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993 not found: ID does not exist" containerID="f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.944884 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993"} err="failed to get container status \"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993\": rpc error: code = NotFound desc = could not find container \"f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993\": container with ID starting with f6da53bb37b57d81dd13f0328a814387532329313559ca036e0eba262487e993 not found: ID does not exist" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.944906 5081 scope.go:117] "RemoveContainer" containerID="a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f" Oct 03 17:10:51 crc kubenswrapper[5081]: E1003 17:10:51.945162 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f\": container with ID starting with a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f not found: ID does not exist" containerID="a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.945194 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f"} err="failed to get container status \"a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f\": rpc error: code = NotFound desc = could not find container \"a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f\": container with ID starting with a51fea2a62593b04267a525270a116d856c0acadf7ad92e6a58bc3eba0eee17f not found: ID does not exist" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.945212 5081 scope.go:117] "RemoveContainer" containerID="c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934" Oct 03 17:10:51 crc kubenswrapper[5081]: E1003 17:10:51.945981 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934\": container with ID starting with c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934 not found: ID does not exist" containerID="c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934" Oct 03 17:10:51 crc kubenswrapper[5081]: I1003 17:10:51.946007 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934"} err="failed to get container status \"c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934\": rpc error: code = NotFound desc = could not find container \"c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934\": container with ID starting with c15608838daf4efa9342ce156e70879e994c7a6b3c476efcdff8c47655cb6934 not found: ID does not exist" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.841348 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" path="/var/lib/kubelet/pods/ac277915-b90c-4dfc-a51a-4a5d885398bf/volumes" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.863506 5081 generic.go:334] "Generic (PLEG): container finished" podID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerID="3a08d02a5e376a0adc4ff9f0f845b5b9e2b015fa4c86b3a0348db648c4cb14e0" exitCode=0 Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 
17:10:53.863628 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerDied","Data":"3a08d02a5e376a0adc4ff9f0f845b5b9e2b015fa4c86b3a0348db648c4cb14e0"} Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866222 5081 generic.go:334] "Generic (PLEG): container finished" podID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerID="1f6700a73c927a287d9702cafb175792c22e14e4023b038774dbcca8cfa16946" exitCode=137 Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866255 5081 generic.go:334] "Generic (PLEG): container finished" podID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerID="f1e67e11cb2083a24dc47a97cc4dbdc3f57c172c484a755f58ac6757f43d61ab" exitCode=137 Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866279 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerDied","Data":"1f6700a73c927a287d9702cafb175792c22e14e4023b038774dbcca8cfa16946"} Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866311 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerDied","Data":"f1e67e11cb2083a24dc47a97cc4dbdc3f57c172c484a755f58ac6757f43d61ab"} Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866321 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fdd9b66bf-qkn6b" event={"ID":"8cdfb721-1db5-4240-8ef8-46870fbc84ed","Type":"ContainerDied","Data":"f2361354f120e8e4ce635034e63e0fe61e7be19fbdf353ed19c7170c02f30344"} Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.866334 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2361354f120e8e4ce635034e63e0fe61e7be19fbdf353ed19c7170c02f30344" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.881592 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.906724 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs\") pod \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.906883 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts\") pod \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.906987 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2lt7\" (UniqueName: \"kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7\") pod \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.907018 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key\") pod \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.907103 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data\") pod \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\" (UID: \"8cdfb721-1db5-4240-8ef8-46870fbc84ed\") " Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.907196 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs" (OuterVolumeSpecName: "logs") pod "8cdfb721-1db5-4240-8ef8-46870fbc84ed" (UID: "8cdfb721-1db5-4240-8ef8-46870fbc84ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.907795 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cdfb721-1db5-4240-8ef8-46870fbc84ed-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.914783 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7" (OuterVolumeSpecName: "kube-api-access-n2lt7") pod "8cdfb721-1db5-4240-8ef8-46870fbc84ed" (UID: "8cdfb721-1db5-4240-8ef8-46870fbc84ed"). InnerVolumeSpecName "kube-api-access-n2lt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.914835 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8cdfb721-1db5-4240-8ef8-46870fbc84ed" (UID: "8cdfb721-1db5-4240-8ef8-46870fbc84ed"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.934350 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data" (OuterVolumeSpecName: "config-data") pod "8cdfb721-1db5-4240-8ef8-46870fbc84ed" (UID: "8cdfb721-1db5-4240-8ef8-46870fbc84ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:10:53 crc kubenswrapper[5081]: I1003 17:10:53.935491 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts" (OuterVolumeSpecName: "scripts") pod "8cdfb721-1db5-4240-8ef8-46870fbc84ed" (UID: "8cdfb721-1db5-4240-8ef8-46870fbc84ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.009915 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2lt7\" (UniqueName: \"kubernetes.io/projected/8cdfb721-1db5-4240-8ef8-46870fbc84ed-kube-api-access-n2lt7\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.009969 5081 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cdfb721-1db5-4240-8ef8-46870fbc84ed-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.009979 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.009990 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cdfb721-1db5-4240-8ef8-46870fbc84ed-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.880834 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-fdd9b66bf-qkn6b" Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.931005 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"] Oct 03 17:10:54 crc kubenswrapper[5081]: I1003 17:10:54.945114 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-fdd9b66bf-qkn6b"] Oct 03 17:10:55 crc kubenswrapper[5081]: I1003 17:10:55.495647 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-77d89b49f7-wtlz7" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Oct 03 17:10:55 crc kubenswrapper[5081]: I1003 17:10:55.842894 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" path="/var/lib/kubelet/pods/8cdfb721-1db5-4240-8ef8-46870fbc84ed/volumes" Oct 03 17:10:57 crc kubenswrapper[5081]: I1003 17:10:57.045898 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-s9rss"] Oct 03 17:10:57 crc kubenswrapper[5081]: I1003 17:10:57.056808 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-s9rss"] Oct 03 17:10:57 crc kubenswrapper[5081]: I1003 17:10:57.839706 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ac283c5-027d-4cf6-be4a-8eb9451c2fb4" path="/var/lib/kubelet/pods/2ac283c5-027d-4cf6-be4a-8eb9451c2fb4/volumes" Oct 03 17:11:00 crc kubenswrapper[5081]: I1003 17:11:00.647716 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:11:00 crc kubenswrapper[5081]: I1003 17:11:00.648032 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:11:05 crc kubenswrapper[5081]: I1003 17:11:05.495330 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-77d89b49f7-wtlz7" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Oct 03 17:11:15 crc kubenswrapper[5081]: I1003 17:11:15.495478 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-77d89b49f7-wtlz7" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Oct 03 17:11:15 crc kubenswrapper[5081]: I1003 17:11:15.496118 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.104030 5081 generic.go:334] "Generic (PLEG): container finished" podID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerID="acee74271c0f74ffc9080306c650e07cad38a254d5554a8f7bbbf5e5ae1f4aa2" exitCode=137 Oct 03 
17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.104113 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerDied","Data":"acee74271c0f74ffc9080306c650e07cad38a254d5554a8f7bbbf5e5ae1f4aa2"} Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.226805 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.400770 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts\") pod \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.400913 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data\") pod \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.400964 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpfc2\" (UniqueName: \"kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2\") pod \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.401032 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key\") pod \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.401107 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs\") pod \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\" (UID: \"6fdad3f4-cb8a-4632-9469-8dad640c88f5\") " Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.402048 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs" (OuterVolumeSpecName: "logs") pod "6fdad3f4-cb8a-4632-9469-8dad640c88f5" (UID: "6fdad3f4-cb8a-4632-9469-8dad640c88f5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.410089 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2" (OuterVolumeSpecName: "kube-api-access-hpfc2") pod "6fdad3f4-cb8a-4632-9469-8dad640c88f5" (UID: "6fdad3f4-cb8a-4632-9469-8dad640c88f5"). InnerVolumeSpecName "kube-api-access-hpfc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.419819 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6fdad3f4-cb8a-4632-9469-8dad640c88f5" (UID: "6fdad3f4-cb8a-4632-9469-8dad640c88f5"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.454710 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data" (OuterVolumeSpecName: "config-data") pod "6fdad3f4-cb8a-4632-9469-8dad640c88f5" (UID: "6fdad3f4-cb8a-4632-9469-8dad640c88f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.454948 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts" (OuterVolumeSpecName: "scripts") pod "6fdad3f4-cb8a-4632-9469-8dad640c88f5" (UID: "6fdad3f4-cb8a-4632-9469-8dad640c88f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.503788 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpfc2\" (UniqueName: \"kubernetes.io/projected/6fdad3f4-cb8a-4632-9469-8dad640c88f5-kube-api-access-hpfc2\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.503822 5081 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6fdad3f4-cb8a-4632-9469-8dad640c88f5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.503834 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fdad3f4-cb8a-4632-9469-8dad640c88f5-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.503842 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:20 crc kubenswrapper[5081]: I1003 17:11:20.503850 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fdad3f4-cb8a-4632-9469-8dad640c88f5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.123597 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d89b49f7-wtlz7" event={"ID":"6fdad3f4-cb8a-4632-9469-8dad640c88f5","Type":"ContainerDied","Data":"d8198dfa3cf1d64e13af604f6226c59fb7551e8ba1d8eb92e17bdb517ff1049b"} Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.123651 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-77d89b49f7-wtlz7" Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.123671 5081 scope.go:117] "RemoveContainer" containerID="3a08d02a5e376a0adc4ff9f0f845b5b9e2b015fa4c86b3a0348db648c4cb14e0" Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.156404 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.164386 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-77d89b49f7-wtlz7"] Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.292881 5081 scope.go:117] "RemoveContainer" containerID="acee74271c0f74ffc9080306c650e07cad38a254d5554a8f7bbbf5e5ae1f4aa2" Oct 03 17:11:21 crc kubenswrapper[5081]: I1003 17:11:21.839465 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" path="/var/lib/kubelet/pods/6fdad3f4-cb8a-4632-9469-8dad640c88f5/volumes" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.646072 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-bdb759599-v7cc7"] Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.646984 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647005 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.647027 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647035 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.647051 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="extract-content" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647060 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="extract-content" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.647077 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="extract-utilities" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647085 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="extract-utilities" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.647110 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647117 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 17:11:23.647126 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="registry-server" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647134 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="registry-server" Oct 03 17:11:23 crc kubenswrapper[5081]: E1003 
17:11:23.647151 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647158 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647394 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac277915-b90c-4dfc-a51a-4a5d885398bf" containerName="registry-server" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647414 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647430 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fdad3f4-cb8a-4632-9469-8dad640c88f5" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647443 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon-log" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.647458 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdfb721-1db5-4240-8ef8-46870fbc84ed" containerName="horizon" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.649737 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.657309 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bdb759599-v7cc7"] Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.676755 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d981dd8-ad4a-4483-a79b-7189f460f7b5-horizon-secret-key\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.677090 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-config-data\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.677227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz9wf\" (UniqueName: \"kubernetes.io/projected/0d981dd8-ad4a-4483-a79b-7189f460f7b5-kube-api-access-tz9wf\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.677367 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-scripts\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.677413 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d981dd8-ad4a-4483-a79b-7189f460f7b5-logs\") pod \"horizon-bdb759599-v7cc7\" (UID: 
\"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.778636 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-scripts\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.778675 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d981dd8-ad4a-4483-a79b-7189f460f7b5-logs\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.778777 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d981dd8-ad4a-4483-a79b-7189f460f7b5-horizon-secret-key\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.778805 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-config-data\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.778831 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz9wf\" (UniqueName: \"kubernetes.io/projected/0d981dd8-ad4a-4483-a79b-7189f460f7b5-kube-api-access-tz9wf\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.779225 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d981dd8-ad4a-4483-a79b-7189f460f7b5-logs\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.779467 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-scripts\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.780547 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d981dd8-ad4a-4483-a79b-7189f460f7b5-config-data\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.796265 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d981dd8-ad4a-4483-a79b-7189f460f7b5-horizon-secret-key\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.799911 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tz9wf\" (UniqueName: \"kubernetes.io/projected/0d981dd8-ad4a-4483-a79b-7189f460f7b5-kube-api-access-tz9wf\") pod \"horizon-bdb759599-v7cc7\" (UID: \"0d981dd8-ad4a-4483-a79b-7189f460f7b5\") " pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:23 crc kubenswrapper[5081]: I1003 17:11:23.972087 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.479254 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-bdb759599-v7cc7"] Oct 03 17:11:24 crc kubenswrapper[5081]: W1003 17:11:24.485698 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d981dd8_ad4a_4483_a79b_7189f460f7b5.slice/crio-86ed05e6d59215c954438263f6ed6e54608b811cb479bee52222ea93841b3a57 WatchSource:0}: Error finding container 86ed05e6d59215c954438263f6ed6e54608b811cb479bee52222ea93841b3a57: Status 404 returned error can't find the container with id 86ed05e6d59215c954438263f6ed6e54608b811cb479bee52222ea93841b3a57 Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.516046 5081 scope.go:117] "RemoveContainer" containerID="49a96e0d646de58d1bb70d7c637f4c768f33c49f0a6859725d1e8e499831256d" Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.555497 5081 scope.go:117] "RemoveContainer" containerID="c8af8c6d66b048793ea9902bd1071f10ac1988204d6f3b8d682ff5f014765f07" Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.597338 5081 scope.go:117] "RemoveContainer" containerID="8f67d77fdc679b919c67c7d1fe8722f85fd52fd300d05b871dbc4651e6eeec65" Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.901704 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-k4jsb"] Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.903163 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:24 crc kubenswrapper[5081]: I1003 17:11:24.922459 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-k4jsb"] Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.004056 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zctc5\" (UniqueName: \"kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5\") pod \"heat-db-create-k4jsb\" (UID: \"09af7c8f-dcd4-4379-905e-ecf7234f9432\") " pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.106013 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zctc5\" (UniqueName: \"kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5\") pod \"heat-db-create-k4jsb\" (UID: \"09af7c8f-dcd4-4379-905e-ecf7234f9432\") " pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.122556 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zctc5\" (UniqueName: \"kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5\") pod \"heat-db-create-k4jsb\" (UID: \"09af7c8f-dcd4-4379-905e-ecf7234f9432\") " pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.167037 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bdb759599-v7cc7" event={"ID":"0d981dd8-ad4a-4483-a79b-7189f460f7b5","Type":"ContainerStarted","Data":"e65b2350ef8aa26ae2e062efb74926588c562530282918ebb2240332f9039a86"} Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.167077 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bdb759599-v7cc7" event={"ID":"0d981dd8-ad4a-4483-a79b-7189f460f7b5","Type":"ContainerStarted","Data":"33608b08c6ce57b3fbad63eee567a2433731404933ace3f333c4eb18f8a8da06"} Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.167088 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-bdb759599-v7cc7" event={"ID":"0d981dd8-ad4a-4483-a79b-7189f460f7b5","Type":"ContainerStarted","Data":"86ed05e6d59215c954438263f6ed6e54608b811cb479bee52222ea93841b3a57"} Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.229141 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.777274 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-bdb759599-v7cc7" podStartSLOduration=2.77724771 podStartE2EDuration="2.77724771s" podCreationTimestamp="2025-10-03 17:11:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:11:25.187286124 +0000 UTC m=+6204.152842727" watchObservedRunningTime="2025-10-03 17:11:25.77724771 +0000 UTC m=+6204.742804323" Oct 03 17:11:25 crc kubenswrapper[5081]: I1003 17:11:25.779978 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-k4jsb"] Oct 03 17:11:26 crc kubenswrapper[5081]: I1003 17:11:26.054503 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-rd4fq"] Oct 03 17:11:26 crc kubenswrapper[5081]: I1003 17:11:26.067281 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-rd4fq"] Oct 03 17:11:26 crc kubenswrapper[5081]: I1003 17:11:26.177617 5081 generic.go:334] "Generic (PLEG): container finished" podID="09af7c8f-dcd4-4379-905e-ecf7234f9432" containerID="e3c20c1fc41fbd5c2894ff3e9d7d716afb06768bb159497319e596b23b726c1d" exitCode=0 Oct 03 17:11:26 crc kubenswrapper[5081]: I1003 17:11:26.177731 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-k4jsb" event={"ID":"09af7c8f-dcd4-4379-905e-ecf7234f9432","Type":"ContainerDied","Data":"e3c20c1fc41fbd5c2894ff3e9d7d716afb06768bb159497319e596b23b726c1d"} Oct 03 17:11:26 crc kubenswrapper[5081]: I1003 17:11:26.177961 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-k4jsb" event={"ID":"09af7c8f-dcd4-4379-905e-ecf7234f9432","Type":"ContainerStarted","Data":"8ff72e8a2b25ea4ef040d3daf28245d01b80df3537ab5cf8cf81febaecd05367"} Oct 03 17:11:27 crc kubenswrapper[5081]: I1003 17:11:27.530473 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:27 crc kubenswrapper[5081]: I1003 17:11:27.658790 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zctc5\" (UniqueName: \"kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5\") pod \"09af7c8f-dcd4-4379-905e-ecf7234f9432\" (UID: \"09af7c8f-dcd4-4379-905e-ecf7234f9432\") " Oct 03 17:11:27 crc kubenswrapper[5081]: I1003 17:11:27.676378 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5" (OuterVolumeSpecName: "kube-api-access-zctc5") pod "09af7c8f-dcd4-4379-905e-ecf7234f9432" (UID: "09af7c8f-dcd4-4379-905e-ecf7234f9432"). InnerVolumeSpecName "kube-api-access-zctc5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:11:27 crc kubenswrapper[5081]: I1003 17:11:27.761752 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zctc5\" (UniqueName: \"kubernetes.io/projected/09af7c8f-dcd4-4379-905e-ecf7234f9432-kube-api-access-zctc5\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:27 crc kubenswrapper[5081]: I1003 17:11:27.840001 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df51ea5-3d03-4c89-82e9-10bc1b10b35c" path="/var/lib/kubelet/pods/1df51ea5-3d03-4c89-82e9-10bc1b10b35c/volumes" Oct 03 17:11:28 crc kubenswrapper[5081]: I1003 17:11:28.194823 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-k4jsb" event={"ID":"09af7c8f-dcd4-4379-905e-ecf7234f9432","Type":"ContainerDied","Data":"8ff72e8a2b25ea4ef040d3daf28245d01b80df3537ab5cf8cf81febaecd05367"} Oct 03 17:11:28 crc kubenswrapper[5081]: I1003 17:11:28.195114 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ff72e8a2b25ea4ef040d3daf28245d01b80df3537ab5cf8cf81febaecd05367" Oct 03 17:11:28 crc kubenswrapper[5081]: I1003 17:11:28.195165 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-k4jsb" Oct 03 17:11:30 crc kubenswrapper[5081]: I1003 17:11:30.647856 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:11:30 crc kubenswrapper[5081]: I1003 17:11:30.648253 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:11:30 crc kubenswrapper[5081]: I1003 17:11:30.648345 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:11:30 crc kubenswrapper[5081]: I1003 17:11:30.649722 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:11:30 crc kubenswrapper[5081]: I1003 17:11:30.649824 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b" gracePeriod=600 Oct 03 17:11:31 crc kubenswrapper[5081]: I1003 17:11:31.224038 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b" exitCode=0 Oct 03 17:11:31 crc kubenswrapper[5081]: I1003 17:11:31.224259 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b"} Oct 03 17:11:31 crc kubenswrapper[5081]: I1003 17:11:31.224345 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"} Oct 03 17:11:31 crc kubenswrapper[5081]: I1003 17:11:31.224367 5081 scope.go:117] "RemoveContainer" containerID="65701cf377b1c358e687f19e9c64c73e1d8053d53a68861e49cd1d8408cf27d3" Oct 03 17:11:33 crc kubenswrapper[5081]: I1003 17:11:33.973204 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:33 crc kubenswrapper[5081]: I1003 17:11:33.973869 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.972493 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-6524-account-create-92zkv"] Oct 03 17:11:34 crc kubenswrapper[5081]: E1003 17:11:34.973106 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09af7c8f-dcd4-4379-905e-ecf7234f9432" containerName="mariadb-database-create" Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.973128 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="09af7c8f-dcd4-4379-905e-ecf7234f9432" containerName="mariadb-database-create" Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.973337 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="09af7c8f-dcd4-4379-905e-ecf7234f9432" containerName="mariadb-database-create" Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.974415 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.985623 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-6524-account-create-92zkv"] Oct 03 17:11:34 crc kubenswrapper[5081]: I1003 17:11:34.987186 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Oct 03 17:11:35 crc kubenswrapper[5081]: I1003 17:11:35.114373 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8k8\" (UniqueName: \"kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8\") pod \"heat-6524-account-create-92zkv\" (UID: \"65e88ab0-0172-4a1a-82ca-3549c445210a\") " pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:35 crc kubenswrapper[5081]: I1003 17:11:35.216497 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8k8\" (UniqueName: \"kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8\") pod \"heat-6524-account-create-92zkv\" (UID: \"65e88ab0-0172-4a1a-82ca-3549c445210a\") " pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:35 crc kubenswrapper[5081]: I1003 17:11:35.244607 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb8k8\" (UniqueName: \"kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8\") pod \"heat-6524-account-create-92zkv\" (UID: \"65e88ab0-0172-4a1a-82ca-3549c445210a\") " pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:35 crc kubenswrapper[5081]: I1003 17:11:35.304705 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:35 crc kubenswrapper[5081]: W1003 17:11:35.780301 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65e88ab0_0172_4a1a_82ca_3549c445210a.slice/crio-b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b WatchSource:0}: Error finding container b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b: Status 404 returned error can't find the container with id b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b Oct 03 17:11:35 crc kubenswrapper[5081]: I1003 17:11:35.782254 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-6524-account-create-92zkv"] Oct 03 17:11:36 crc kubenswrapper[5081]: I1003 17:11:36.032316 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-fbf2-account-create-z77t8"] Oct 03 17:11:36 crc kubenswrapper[5081]: I1003 17:11:36.041764 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-fbf2-account-create-z77t8"] Oct 03 17:11:36 crc kubenswrapper[5081]: I1003 17:11:36.272221 5081 generic.go:334] "Generic (PLEG): container finished" podID="65e88ab0-0172-4a1a-82ca-3549c445210a" containerID="7730edc6c2532e5e8c2c8c95e3cdaf27d0786ce665748fe56d8d6fd5dd0a2377" exitCode=0 Oct 03 17:11:36 crc kubenswrapper[5081]: I1003 17:11:36.272292 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-6524-account-create-92zkv" event={"ID":"65e88ab0-0172-4a1a-82ca-3549c445210a","Type":"ContainerDied","Data":"7730edc6c2532e5e8c2c8c95e3cdaf27d0786ce665748fe56d8d6fd5dd0a2377"} Oct 03 17:11:36 crc kubenswrapper[5081]: I1003 17:11:36.272779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/heat-6524-account-create-92zkv" event={"ID":"65e88ab0-0172-4a1a-82ca-3549c445210a","Type":"ContainerStarted","Data":"b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b"} Oct 03 17:11:37 crc kubenswrapper[5081]: I1003 17:11:37.599924 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:37 crc kubenswrapper[5081]: I1003 17:11:37.779244 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb8k8\" (UniqueName: \"kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8\") pod \"65e88ab0-0172-4a1a-82ca-3549c445210a\" (UID: \"65e88ab0-0172-4a1a-82ca-3549c445210a\") " Oct 03 17:11:37 crc kubenswrapper[5081]: I1003 17:11:37.784914 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8" (OuterVolumeSpecName: "kube-api-access-lb8k8") pod "65e88ab0-0172-4a1a-82ca-3549c445210a" (UID: "65e88ab0-0172-4a1a-82ca-3549c445210a"). InnerVolumeSpecName "kube-api-access-lb8k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:11:37 crc kubenswrapper[5081]: I1003 17:11:37.837774 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07fb0241-0219-4ade-a23a-18ea596692a1" path="/var/lib/kubelet/pods/07fb0241-0219-4ade-a23a-18ea596692a1/volumes" Oct 03 17:11:37 crc kubenswrapper[5081]: I1003 17:11:37.881926 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb8k8\" (UniqueName: \"kubernetes.io/projected/65e88ab0-0172-4a1a-82ca-3549c445210a-kube-api-access-lb8k8\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:38 crc kubenswrapper[5081]: I1003 17:11:38.292480 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-6524-account-create-92zkv" event={"ID":"65e88ab0-0172-4a1a-82ca-3549c445210a","Type":"ContainerDied","Data":"b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b"} Oct 03 17:11:38 crc kubenswrapper[5081]: I1003 17:11:38.292517 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4fb86a69c713e3a48931b47cd2dc3c18fbc7af10b7b1d5c352ff4671631727b" Oct 03 17:11:38 crc kubenswrapper[5081]: I1003 17:11:38.292520 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-6524-account-create-92zkv" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.011907 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-s9zm6"] Oct 03 17:11:40 crc kubenswrapper[5081]: E1003 17:11:40.012707 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65e88ab0-0172-4a1a-82ca-3549c445210a" containerName="mariadb-account-create" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.012724 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="65e88ab0-0172-4a1a-82ca-3549c445210a" containerName="mariadb-account-create" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.012970 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="65e88ab0-0172-4a1a-82ca-3549c445210a" containerName="mariadb-account-create" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.013614 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.016389 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-d79b4" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.016618 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.027531 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-s9zm6"] Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.124256 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.124331 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc4pm\" (UniqueName: \"kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.124538 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.226149 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.226217 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.226268 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc4pm\" (UniqueName: \"kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.232778 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.233216 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" 
Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.247896 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc4pm\" (UniqueName: \"kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm\") pod \"heat-db-sync-s9zm6\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.337919 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:40 crc kubenswrapper[5081]: I1003 17:11:40.791461 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-s9zm6"] Oct 03 17:11:41 crc kubenswrapper[5081]: I1003 17:11:41.319896 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s9zm6" event={"ID":"1773a7b7-861b-4115-b891-31f17d7b0281","Type":"ContainerStarted","Data":"b2d9d439cccb49ca5bd2afda5974726c59ecb840940b7a6bd6824d34d0e02b77"} Oct 03 17:11:42 crc kubenswrapper[5081]: I1003 17:11:42.027849 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jm9jp"] Oct 03 17:11:42 crc kubenswrapper[5081]: I1003 17:11:42.042936 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jm9jp"] Oct 03 17:11:43 crc kubenswrapper[5081]: I1003 17:11:43.859373 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a89a4aa-b326-4925-a274-510acf1200e9" path="/var/lib/kubelet/pods/2a89a4aa-b326-4925-a274-510acf1200e9/volumes" Oct 03 17:11:45 crc kubenswrapper[5081]: I1003 17:11:45.803719 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.391044 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s9zm6" event={"ID":"1773a7b7-861b-4115-b891-31f17d7b0281","Type":"ContainerStarted","Data":"5e0eeb4ed5c1d005c6221cf5d3cbc239c2b676d769d794dfd0b6717c3d768167"} Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.427552 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-s9zm6" podStartSLOduration=2.421608904 podStartE2EDuration="8.427488041s" podCreationTimestamp="2025-10-03 17:11:39 +0000 UTC" firstStartedPulling="2025-10-03 17:11:40.7967983 +0000 UTC m=+6219.762354913" lastFinishedPulling="2025-10-03 17:11:46.802677437 +0000 UTC m=+6225.768234050" observedRunningTime="2025-10-03 17:11:47.419339058 +0000 UTC m=+6226.384895671" watchObservedRunningTime="2025-10-03 17:11:47.427488041 +0000 UTC m=+6226.393044654" Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.486602 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-bdb759599-v7cc7" Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.540474 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.541081 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-89cbfd487-5hdbz" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon-log" containerID="cri-o://244e9d2b203d71adb49760c4948166a58535d5be74f4db2a4062233851374648" gracePeriod=30 Oct 03 17:11:47 crc kubenswrapper[5081]: I1003 17:11:47.541539 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-89cbfd487-5hdbz" 
podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" containerID="cri-o://d65accdacb0fc8da7399ac70c0431da583b8ed4b4339982df558a82a2174a0f9" gracePeriod=30 Oct 03 17:11:50 crc kubenswrapper[5081]: I1003 17:11:50.431106 5081 generic.go:334] "Generic (PLEG): container finished" podID="1773a7b7-861b-4115-b891-31f17d7b0281" containerID="5e0eeb4ed5c1d005c6221cf5d3cbc239c2b676d769d794dfd0b6717c3d768167" exitCode=0 Oct 03 17:11:50 crc kubenswrapper[5081]: I1003 17:11:50.431202 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s9zm6" event={"ID":"1773a7b7-861b-4115-b891-31f17d7b0281","Type":"ContainerDied","Data":"5e0eeb4ed5c1d005c6221cf5d3cbc239c2b676d769d794dfd0b6717c3d768167"} Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.442806 5081 generic.go:334] "Generic (PLEG): container finished" podID="5692d3e9-857c-42bb-acbc-68142e710415" containerID="d65accdacb0fc8da7399ac70c0431da583b8ed4b4339982df558a82a2174a0f9" exitCode=0 Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.442900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerDied","Data":"d65accdacb0fc8da7399ac70c0431da583b8ed4b4339982df558a82a2174a0f9"} Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.792221 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.953233 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data\") pod \"1773a7b7-861b-4115-b891-31f17d7b0281\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.953505 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle\") pod \"1773a7b7-861b-4115-b891-31f17d7b0281\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.953532 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xc4pm\" (UniqueName: \"kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm\") pod \"1773a7b7-861b-4115-b891-31f17d7b0281\" (UID: \"1773a7b7-861b-4115-b891-31f17d7b0281\") " Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.960131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm" (OuterVolumeSpecName: "kube-api-access-xc4pm") pod "1773a7b7-861b-4115-b891-31f17d7b0281" (UID: "1773a7b7-861b-4115-b891-31f17d7b0281"). InnerVolumeSpecName "kube-api-access-xc4pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:11:51 crc kubenswrapper[5081]: I1003 17:11:51.980745 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1773a7b7-861b-4115-b891-31f17d7b0281" (UID: "1773a7b7-861b-4115-b891-31f17d7b0281"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.026887 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data" (OuterVolumeSpecName: "config-data") pod "1773a7b7-861b-4115-b891-31f17d7b0281" (UID: "1773a7b7-861b-4115-b891-31f17d7b0281"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.055875 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.055934 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xc4pm\" (UniqueName: \"kubernetes.io/projected/1773a7b7-861b-4115-b891-31f17d7b0281-kube-api-access-xc4pm\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.055948 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1773a7b7-861b-4115-b891-31f17d7b0281-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.455786 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s9zm6" event={"ID":"1773a7b7-861b-4115-b891-31f17d7b0281","Type":"ContainerDied","Data":"b2d9d439cccb49ca5bd2afda5974726c59ecb840940b7a6bd6824d34d0e02b77"} Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.456068 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2d9d439cccb49ca5bd2afda5974726c59ecb840940b7a6bd6824d34d0e02b77" Oct 03 17:11:52 crc kubenswrapper[5081]: I1003 17:11:52.456018 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-s9zm6" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.551661 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-764cf6b767-tpqfz"] Oct 03 17:11:53 crc kubenswrapper[5081]: E1003 17:11:53.552525 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1773a7b7-861b-4115-b891-31f17d7b0281" containerName="heat-db-sync" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.552543 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1773a7b7-861b-4115-b891-31f17d7b0281" containerName="heat-db-sync" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.552797 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1773a7b7-861b-4115-b891-31f17d7b0281" containerName="heat-db-sync" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.553742 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.559124 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.559459 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.559623 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-d79b4" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.579291 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-764cf6b767-tpqfz"] Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.683113 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-59bdc8c586-94n9t"] Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.684979 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.688238 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.693761 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk6zb\" (UniqueName: \"kubernetes.io/projected/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-kube-api-access-sk6zb\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.693828 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.694051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data-custom\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.694085 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-combined-ca-bundle\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.715303 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-59bdc8c586-94n9t"] Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.732685 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6cdc8c77c-jb7zk"] Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.734385 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.738830 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.756399 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6cdc8c77c-jb7zk"] Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796160 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data-custom\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796232 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-combined-ca-bundle\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796267 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-combined-ca-bundle\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796371 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk6zb\" (UniqueName: \"kubernetes.io/projected/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-kube-api-access-sk6zb\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796446 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796467 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796634 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czwf8\" (UniqueName: \"kubernetes.io/projected/ae3762c4-4c4b-4aa3-894c-b690f2278873-kube-api-access-czwf8\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.796664 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data-custom\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " 
pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.803178 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.808407 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-config-data-custom\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.809380 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-combined-ca-bundle\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.812435 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk6zb\" (UniqueName: \"kubernetes.io/projected/cccdc4a3-640a-4f97-800c-d5e8dce5c50e-kube-api-access-sk6zb\") pod \"heat-engine-764cf6b767-tpqfz\" (UID: \"cccdc4a3-640a-4f97-800c-d5e8dce5c50e\") " pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.885616 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898256 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data-custom\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898316 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898398 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czwf8\" (UniqueName: \"kubernetes.io/projected/ae3762c4-4c4b-4aa3-894c-b690f2278873-kube-api-access-czwf8\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898421 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data-custom\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898454 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-combined-ca-bundle\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898485 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-combined-ca-bundle\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tt2r\" (UniqueName: \"kubernetes.io/projected/6e40af3b-73b5-4f03-87f7-cad577ecb4da-kube-api-access-6tt2r\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.898548 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.903351 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-combined-ca-bundle\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.904353 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data-custom\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.904715 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae3762c4-4c4b-4aa3-894c-b690f2278873-config-data\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:53 crc kubenswrapper[5081]: I1003 17:11:53.916956 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czwf8\" (UniqueName: \"kubernetes.io/projected/ae3762c4-4c4b-4aa3-894c-b690f2278873-kube-api-access-czwf8\") pod \"heat-api-59bdc8c586-94n9t\" (UID: \"ae3762c4-4c4b-4aa3-894c-b690f2278873\") " pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.000826 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.001296 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-combined-ca-bundle\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.001354 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tt2r\" (UniqueName: \"kubernetes.io/projected/6e40af3b-73b5-4f03-87f7-cad577ecb4da-kube-api-access-6tt2r\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.001382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data-custom\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.006377 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.011201 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-combined-ca-bundle\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.025506 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40af3b-73b5-4f03-87f7-cad577ecb4da-config-data-custom\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.025889 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.027318 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tt2r\" (UniqueName: \"kubernetes.io/projected/6e40af3b-73b5-4f03-87f7-cad577ecb4da-kube-api-access-6tt2r\") pod \"heat-cfnapi-6cdc8c77c-jb7zk\" (UID: \"6e40af3b-73b5-4f03-87f7-cad577ecb4da\") " pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.074925 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.372044 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-764cf6b767-tpqfz"] Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.477387 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-764cf6b767-tpqfz" event={"ID":"cccdc4a3-640a-4f97-800c-d5e8dce5c50e","Type":"ContainerStarted","Data":"c26d5a1294ed355f4c208b09b3765cc36e84789cafe7d4b3f64cdfec85aaf61c"} Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.552505 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-59bdc8c586-94n9t"] Oct 03 17:11:54 crc kubenswrapper[5081]: I1003 17:11:54.662902 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6cdc8c77c-jb7zk"] Oct 03 17:11:55 crc kubenswrapper[5081]: I1003 17:11:55.507240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" event={"ID":"6e40af3b-73b5-4f03-87f7-cad577ecb4da","Type":"ContainerStarted","Data":"a030c9587e6962b36286c72d15181d4ce7d2546336f96b3262b2d78abd5f78c7"} Oct 03 17:11:55 crc kubenswrapper[5081]: I1003 17:11:55.509075 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-59bdc8c586-94n9t" event={"ID":"ae3762c4-4c4b-4aa3-894c-b690f2278873","Type":"ContainerStarted","Data":"6d20282ef5003cf9219fcbca4eae6bf523250e5cc6c136356b75ede63c001fa3"} Oct 03 17:11:55 crc kubenswrapper[5081]: I1003 17:11:55.516788 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-764cf6b767-tpqfz" event={"ID":"cccdc4a3-640a-4f97-800c-d5e8dce5c50e","Type":"ContainerStarted","Data":"05fe37bd8d6e41e471857b806793aa560e5af467665ab173258e6efad98bea2e"} Oct 03 17:11:55 crc kubenswrapper[5081]: I1003 17:11:55.517060 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:11:55 crc kubenswrapper[5081]: I1003 17:11:55.536988 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-764cf6b767-tpqfz" podStartSLOduration=2.536967632 podStartE2EDuration="2.536967632s" podCreationTimestamp="2025-10-03 17:11:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:11:55.532506024 +0000 UTC m=+6234.498062657" watchObservedRunningTime="2025-10-03 17:11:55.536967632 +0000 UTC m=+6234.502524245" Oct 03 17:11:56 crc kubenswrapper[5081]: I1003 17:11:56.144694 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-89cbfd487-5hdbz" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.540180 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" event={"ID":"6e40af3b-73b5-4f03-87f7-cad577ecb4da","Type":"ContainerStarted","Data":"2b0a11544636f027d78e5fbc1587e3e94935b20dbeaf1fdd34d035333ccd23a9"} Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.542814 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.545803 5081 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/heat-api-59bdc8c586-94n9t" event={"ID":"ae3762c4-4c4b-4aa3-894c-b690f2278873","Type":"ContainerStarted","Data":"a43b4640686ccfeeff74624bb91587036a364a3475ca88a2261ef072de78bf3b"} Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.546116 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.565870 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" podStartSLOduration=2.814852831 podStartE2EDuration="4.565846816s" podCreationTimestamp="2025-10-03 17:11:53 +0000 UTC" firstStartedPulling="2025-10-03 17:11:54.67089152 +0000 UTC m=+6233.636448123" lastFinishedPulling="2025-10-03 17:11:56.421885495 +0000 UTC m=+6235.387442108" observedRunningTime="2025-10-03 17:11:57.559896175 +0000 UTC m=+6236.525452798" watchObservedRunningTime="2025-10-03 17:11:57.565846816 +0000 UTC m=+6236.531403429" Oct 03 17:11:57 crc kubenswrapper[5081]: I1003 17:11:57.584232 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-59bdc8c586-94n9t" podStartSLOduration=2.723042192 podStartE2EDuration="4.584213062s" podCreationTimestamp="2025-10-03 17:11:53 +0000 UTC" firstStartedPulling="2025-10-03 17:11:54.564457682 +0000 UTC m=+6233.530014295" lastFinishedPulling="2025-10-03 17:11:56.425628552 +0000 UTC m=+6235.391185165" observedRunningTime="2025-10-03 17:11:57.577756837 +0000 UTC m=+6236.543313450" watchObservedRunningTime="2025-10-03 17:11:57.584213062 +0000 UTC m=+6236.549769675" Oct 03 17:12:05 crc kubenswrapper[5081]: I1003 17:12:05.321981 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-59bdc8c586-94n9t" Oct 03 17:12:05 crc kubenswrapper[5081]: I1003 17:12:05.410465 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6cdc8c77c-jb7zk" Oct 03 17:12:06 crc kubenswrapper[5081]: I1003 17:12:06.144228 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-89cbfd487-5hdbz" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Oct 03 17:12:13 crc kubenswrapper[5081]: I1003 17:12:13.916213 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-764cf6b767-tpqfz" Oct 03 17:12:16 crc kubenswrapper[5081]: I1003 17:12:16.144855 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-89cbfd487-5hdbz" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Oct 03 17:12:16 crc kubenswrapper[5081]: I1003 17:12:16.145268 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:12:17 crc kubenswrapper[5081]: I1003 17:12:17.741245 5081 generic.go:334] "Generic (PLEG): container finished" podID="5692d3e9-857c-42bb-acbc-68142e710415" containerID="244e9d2b203d71adb49760c4948166a58535d5be74f4db2a4062233851374648" exitCode=137 Oct 03 17:12:17 crc kubenswrapper[5081]: I1003 17:12:17.741426 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" 
event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerDied","Data":"244e9d2b203d71adb49760c4948166a58535d5be74f4db2a4062233851374648"} Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.725011 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.760457 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-89cbfd487-5hdbz" event={"ID":"5692d3e9-857c-42bb-acbc-68142e710415","Type":"ContainerDied","Data":"ccd0635724c231b8451a0ebbc0f731a09d48561e8bebd2b52a48fe544c6e00e5"} Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.760511 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-89cbfd487-5hdbz" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.760591 5081 scope.go:117] "RemoveContainer" containerID="d65accdacb0fc8da7399ac70c0431da583b8ed4b4339982df558a82a2174a0f9" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.824463 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data\") pod \"5692d3e9-857c-42bb-acbc-68142e710415\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.824546 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs\") pod \"5692d3e9-857c-42bb-acbc-68142e710415\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.825696 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key\") pod \"5692d3e9-857c-42bb-acbc-68142e710415\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.825369 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs" (OuterVolumeSpecName: "logs") pod "5692d3e9-857c-42bb-acbc-68142e710415" (UID: "5692d3e9-857c-42bb-acbc-68142e710415"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.825791 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts\") pod \"5692d3e9-857c-42bb-acbc-68142e710415\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.825869 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghkgt\" (UniqueName: \"kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt\") pod \"5692d3e9-857c-42bb-acbc-68142e710415\" (UID: \"5692d3e9-857c-42bb-acbc-68142e710415\") " Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.826343 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5692d3e9-857c-42bb-acbc-68142e710415-logs\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.837985 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5692d3e9-857c-42bb-acbc-68142e710415" (UID: "5692d3e9-857c-42bb-acbc-68142e710415"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.838949 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt" (OuterVolumeSpecName: "kube-api-access-ghkgt") pod "5692d3e9-857c-42bb-acbc-68142e710415" (UID: "5692d3e9-857c-42bb-acbc-68142e710415"). InnerVolumeSpecName "kube-api-access-ghkgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.867073 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data" (OuterVolumeSpecName: "config-data") pod "5692d3e9-857c-42bb-acbc-68142e710415" (UID: "5692d3e9-857c-42bb-acbc-68142e710415"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.872135 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts" (OuterVolumeSpecName: "scripts") pod "5692d3e9-857c-42bb-acbc-68142e710415" (UID: "5692d3e9-857c-42bb-acbc-68142e710415"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.928909 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghkgt\" (UniqueName: \"kubernetes.io/projected/5692d3e9-857c-42bb-acbc-68142e710415-kube-api-access-ghkgt\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.928952 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.928961 5081 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5692d3e9-857c-42bb-acbc-68142e710415-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.928969 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5692d3e9-857c-42bb-acbc-68142e710415-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:18 crc kubenswrapper[5081]: I1003 17:12:18.942667 5081 scope.go:117] "RemoveContainer" containerID="244e9d2b203d71adb49760c4948166a58535d5be74f4db2a4062233851374648" Oct 03 17:12:19 crc kubenswrapper[5081]: I1003 17:12:19.095292 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:12:19 crc kubenswrapper[5081]: I1003 17:12:19.103741 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-89cbfd487-5hdbz"] Oct 03 17:12:19 crc kubenswrapper[5081]: I1003 17:12:19.844980 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5692d3e9-857c-42bb-acbc-68142e710415" path="/var/lib/kubelet/pods/5692d3e9-857c-42bb-acbc-68142e710415/volumes" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.049937 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58"] Oct 03 17:12:24 crc kubenswrapper[5081]: E1003 17:12:24.050659 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon-log" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.050673 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon-log" Oct 03 17:12:24 crc kubenswrapper[5081]: E1003 17:12:24.050685 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.050691 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.050896 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.050913 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5692d3e9-857c-42bb-acbc-68142e710415" containerName="horizon-log" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.052314 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.054367 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.071305 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58"] Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.138586 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.138762 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qhtk\" (UniqueName: \"kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.138796 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.240278 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qhtk\" (UniqueName: \"kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.240326 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.240433 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.240848 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.240849 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.260116 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qhtk\" (UniqueName: \"kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.373054 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.734813 5081 scope.go:117] "RemoveContainer" containerID="6399490e5383323e3e22946240082d352ebff157f1442d1598a6ea16677f4252" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.767055 5081 scope.go:117] "RemoveContainer" containerID="d672d848edbf3802fc589777f88d24286984a46b56a1fe5f9a6930b1934fb984" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.810764 5081 scope.go:117] "RemoveContainer" containerID="1e2de4cb5973b3fcff5d7e4adcc71758676813de4ee67f9ef023419d234bd94f" Oct 03 17:12:24 crc kubenswrapper[5081]: I1003 17:12:24.839504 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58"] Oct 03 17:12:24 crc kubenswrapper[5081]: W1003 17:12:24.852487 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode940af2e_a01b_4861_9b5e_db37af275f1c.slice/crio-ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b WatchSource:0}: Error finding container ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b: Status 404 returned error can't find the container with id ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b Oct 03 17:12:25 crc kubenswrapper[5081]: I1003 17:12:25.830532 5081 generic.go:334] "Generic (PLEG): container finished" podID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerID="acbcc4c9e00a8415d2d984d0ee0f6b380482454eaaf87f693bb53689ff3a1609" exitCode=0 Oct 03 17:12:25 crc kubenswrapper[5081]: I1003 17:12:25.840090 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerDied","Data":"acbcc4c9e00a8415d2d984d0ee0f6b380482454eaaf87f693bb53689ff3a1609"} Oct 03 17:12:25 crc kubenswrapper[5081]: I1003 17:12:25.840131 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" 
event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerStarted","Data":"ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b"} Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.403877 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.410137 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.475850 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.486043 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.486329 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.486430 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjp2f\" (UniqueName: \"kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.588317 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjp2f\" (UniqueName: \"kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.588787 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.589108 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.589537 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.589613 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.613453 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjp2f\" (UniqueName: \"kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f\") pod \"redhat-operators-cjlq2\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:26 crc kubenswrapper[5081]: I1003 17:12:26.775372 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:27 crc kubenswrapper[5081]: I1003 17:12:27.242507 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:27 crc kubenswrapper[5081]: W1003 17:12:27.256589 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbea78b20_3f4a_4276_828d_628388f8e1df.slice/crio-00207ee477aa2a5dde6380162b0200dde10cef461818b128fb41ea67c4148016 WatchSource:0}: Error finding container 00207ee477aa2a5dde6380162b0200dde10cef461818b128fb41ea67c4148016: Status 404 returned error can't find the container with id 00207ee477aa2a5dde6380162b0200dde10cef461818b128fb41ea67c4148016 Oct 03 17:12:27 crc kubenswrapper[5081]: I1003 17:12:27.852450 5081 generic.go:334] "Generic (PLEG): container finished" podID="bea78b20-3f4a-4276-828d-628388f8e1df" containerID="969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683" exitCode=0 Oct 03 17:12:27 crc kubenswrapper[5081]: I1003 17:12:27.852523 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerDied","Data":"969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683"} Oct 03 17:12:27 crc kubenswrapper[5081]: I1003 17:12:27.852851 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerStarted","Data":"00207ee477aa2a5dde6380162b0200dde10cef461818b128fb41ea67c4148016"} Oct 03 17:12:29 crc kubenswrapper[5081]: I1003 17:12:29.873279 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerStarted","Data":"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566"} Oct 03 17:12:29 crc kubenswrapper[5081]: I1003 17:12:29.877129 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerStarted","Data":"c7f9615a906bb7d546d3936017f3377b6681f4d465ce13ed689f848ef4b21883"} Oct 03 17:12:30 crc kubenswrapper[5081]: I1003 17:12:30.894122 5081 generic.go:334] "Generic (PLEG): container finished" podID="bea78b20-3f4a-4276-828d-628388f8e1df" containerID="e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566" exitCode=0 Oct 03 17:12:30 crc kubenswrapper[5081]: I1003 17:12:30.894256 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerDied","Data":"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566"} Oct 03 17:12:31 crc kubenswrapper[5081]: I1003 17:12:31.919865 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerStarted","Data":"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56"} Oct 03 17:12:31 crc kubenswrapper[5081]: I1003 17:12:31.938285 5081 generic.go:334] "Generic (PLEG): container finished" podID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerID="c7f9615a906bb7d546d3936017f3377b6681f4d465ce13ed689f848ef4b21883" exitCode=0 Oct 03 17:12:31 crc kubenswrapper[5081]: I1003 17:12:31.938342 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerDied","Data":"c7f9615a906bb7d546d3936017f3377b6681f4d465ce13ed689f848ef4b21883"} Oct 03 17:12:31 crc kubenswrapper[5081]: I1003 17:12:31.943853 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cjlq2" podStartSLOduration=2.309942628 podStartE2EDuration="5.943824496s" podCreationTimestamp="2025-10-03 17:12:26 +0000 UTC" firstStartedPulling="2025-10-03 17:12:27.85432657 +0000 UTC m=+6266.819883183" lastFinishedPulling="2025-10-03 17:12:31.488208437 +0000 UTC m=+6270.453765051" observedRunningTime="2025-10-03 17:12:31.938008869 +0000 UTC m=+6270.903565492" watchObservedRunningTime="2025-10-03 17:12:31.943824496 +0000 UTC m=+6270.909381109" Oct 03 17:12:32 crc kubenswrapper[5081]: I1003 17:12:32.954876 5081 generic.go:334] "Generic (PLEG): container finished" podID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerID="c1c099faba246de4a572bab578cf492e7af1d076cd85e43362083dae48498c2b" exitCode=0 Oct 03 17:12:32 crc kubenswrapper[5081]: I1003 17:12:32.954965 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerDied","Data":"c1c099faba246de4a572bab578cf492e7af1d076cd85e43362083dae48498c2b"} Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.338781 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.392176 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qhtk\" (UniqueName: \"kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk\") pod \"e940af2e-a01b-4861-9b5e-db37af275f1c\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.392296 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util\") pod \"e940af2e-a01b-4861-9b5e-db37af275f1c\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.392373 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle\") pod \"e940af2e-a01b-4861-9b5e-db37af275f1c\" (UID: \"e940af2e-a01b-4861-9b5e-db37af275f1c\") " Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.394501 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle" (OuterVolumeSpecName: "bundle") pod "e940af2e-a01b-4861-9b5e-db37af275f1c" (UID: "e940af2e-a01b-4861-9b5e-db37af275f1c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.401246 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util" (OuterVolumeSpecName: "util") pod "e940af2e-a01b-4861-9b5e-db37af275f1c" (UID: "e940af2e-a01b-4861-9b5e-db37af275f1c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.407061 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk" (OuterVolumeSpecName: "kube-api-access-4qhtk") pod "e940af2e-a01b-4861-9b5e-db37af275f1c" (UID: "e940af2e-a01b-4861-9b5e-db37af275f1c"). InnerVolumeSpecName "kube-api-access-4qhtk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.495614 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qhtk\" (UniqueName: \"kubernetes.io/projected/e940af2e-a01b-4861-9b5e-db37af275f1c-kube-api-access-4qhtk\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.495666 5081 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-util\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.495679 5081 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e940af2e-a01b-4861-9b5e-db37af275f1c-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.974781 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" event={"ID":"e940af2e-a01b-4861-9b5e-db37af275f1c","Type":"ContainerDied","Data":"ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b"} Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.974823 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58" Oct 03 17:12:34 crc kubenswrapper[5081]: I1003 17:12:34.974836 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea3f2af404855d214a93dfc847294e2df789474375c2a7a25f44421e8407877b" Oct 03 17:12:36 crc kubenswrapper[5081]: I1003 17:12:36.775647 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:36 crc kubenswrapper[5081]: I1003 17:12:36.775954 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:36 crc kubenswrapper[5081]: I1003 17:12:36.858688 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:37 crc kubenswrapper[5081]: I1003 17:12:37.041353 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:37 crc kubenswrapper[5081]: I1003 17:12:37.099149 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.015969 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cjlq2" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="registry-server" containerID="cri-o://330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56" gracePeriod=2 Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.041501 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-6f2fx"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.058657 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-wxcsk"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.068882 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-wxcsk"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.077870 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell0-db-create-6f2fx"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.087669 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wxmmr"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.102343 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wxmmr"] Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.490111 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.607629 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities\") pod \"bea78b20-3f4a-4276-828d-628388f8e1df\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.607700 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content\") pod \"bea78b20-3f4a-4276-828d-628388f8e1df\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.607899 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjp2f\" (UniqueName: \"kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f\") pod \"bea78b20-3f4a-4276-828d-628388f8e1df\" (UID: \"bea78b20-3f4a-4276-828d-628388f8e1df\") " Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.608847 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities" (OuterVolumeSpecName: "utilities") pod "bea78b20-3f4a-4276-828d-628388f8e1df" (UID: "bea78b20-3f4a-4276-828d-628388f8e1df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.617126 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f" (OuterVolumeSpecName: "kube-api-access-mjp2f") pod "bea78b20-3f4a-4276-828d-628388f8e1df" (UID: "bea78b20-3f4a-4276-828d-628388f8e1df"). InnerVolumeSpecName "kube-api-access-mjp2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.707755 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bea78b20-3f4a-4276-828d-628388f8e1df" (UID: "bea78b20-3f4a-4276-828d-628388f8e1df"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.710702 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.710741 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bea78b20-3f4a-4276-828d-628388f8e1df-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.710752 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjp2f\" (UniqueName: \"kubernetes.io/projected/bea78b20-3f4a-4276-828d-628388f8e1df-kube-api-access-mjp2f\") on node \"crc\" DevicePath \"\"" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.840729 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f17125-3b49-4560-aaab-3c9f0218d7a5" path="/var/lib/kubelet/pods/14f17125-3b49-4560-aaab-3c9f0218d7a5/volumes" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.841722 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cdc9f9a-f530-454b-a477-cfdce21cf6b8" path="/var/lib/kubelet/pods/1cdc9f9a-f530-454b-a477-cfdce21cf6b8/volumes" Oct 03 17:12:39 crc kubenswrapper[5081]: I1003 17:12:39.842308 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83c32c48-b8b0-418a-ac7a-e335d0564245" path="/var/lib/kubelet/pods/83c32c48-b8b0-418a-ac7a-e335d0564245/volumes" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.026227 5081 generic.go:334] "Generic (PLEG): container finished" podID="bea78b20-3f4a-4276-828d-628388f8e1df" containerID="330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56" exitCode=0 Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.026273 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjlq2" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.026283 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerDied","Data":"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56"} Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.027621 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjlq2" event={"ID":"bea78b20-3f4a-4276-828d-628388f8e1df","Type":"ContainerDied","Data":"00207ee477aa2a5dde6380162b0200dde10cef461818b128fb41ea67c4148016"} Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.027647 5081 scope.go:117] "RemoveContainer" containerID="330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.052666 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.057858 5081 scope.go:117] "RemoveContainer" containerID="e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.067706 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cjlq2"] Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.093523 5081 scope.go:117] "RemoveContainer" containerID="969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.130230 5081 scope.go:117] "RemoveContainer" containerID="330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56" Oct 03 17:12:40 crc kubenswrapper[5081]: E1003 17:12:40.130811 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56\": container with ID starting with 330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56 not found: ID does not exist" containerID="330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.130850 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56"} err="failed to get container status \"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56\": rpc error: code = NotFound desc = could not find container \"330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56\": container with ID starting with 330d4c28c531ae48a4e05fbb1b82b1f4d02d6029b79a90e59592afb731c78a56 not found: ID does not exist" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.130881 5081 scope.go:117] "RemoveContainer" containerID="e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566" Oct 03 17:12:40 crc kubenswrapper[5081]: E1003 17:12:40.131225 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566\": container with ID starting with e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566 not found: ID does not exist" containerID="e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.131283 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566"} err="failed to get container status \"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566\": rpc error: code = NotFound desc = could not find container \"e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566\": container with ID starting with e973c3f58371cc8e3c3d8037a81ed5fd8d99fbfa0ca2b39f8a6f4e2c0a258566 not found: ID does not exist" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.131320 5081 scope.go:117] "RemoveContainer" containerID="969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683" Oct 03 17:12:40 crc kubenswrapper[5081]: E1003 17:12:40.131918 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683\": container with ID starting with 969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683 not found: ID does not exist" containerID="969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683" Oct 03 17:12:40 crc kubenswrapper[5081]: I1003 17:12:40.131956 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683"} err="failed to get container status \"969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683\": rpc error: code = NotFound desc = could not find container \"969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683\": container with ID starting with 969b133fef4e2555a5c574aa8ffb3f76acaf41866ead3dc31f3a9c7a0e319683 not found: ID does not exist" Oct 03 17:12:41 crc kubenswrapper[5081]: I1003 17:12:41.841074 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" path="/var/lib/kubelet/pods/bea78b20-3f4a-4276-828d-628388f8e1df/volumes" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.890845 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm"] Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.891901 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="registry-server" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.891915 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="registry-server" Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.891941 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="pull" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.891947 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="pull" Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.891967 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="util" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.891973 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="util" Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.891986 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="extract-content" Oct 03 
17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.891991 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="extract-content" Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.892005 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="extract-utilities" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.892010 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="extract-utilities" Oct 03 17:12:46 crc kubenswrapper[5081]: E1003 17:12:46.892029 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="extract" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.892035 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="extract" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.892312 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e940af2e-a01b-4861-9b5e-db37af275f1c" containerName="extract" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.892331 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea78b20-3f4a-4276-828d-628388f8e1df" containerName="registry-server" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.893083 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.895819 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.896009 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.897652 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-4q254" Oct 03 17:12:46 crc kubenswrapper[5081]: I1003 17:12:46.908482 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.041353 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.043080 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.045982 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-nthlj" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.046727 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.054793 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.063701 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9vmn\" (UniqueName: \"kubernetes.io/projected/83a23695-6bcb-4dbd-909d-0f7af9be2b25-kube-api-access-m9vmn\") pod \"obo-prometheus-operator-7c8cf85677-z4jcm\" (UID: \"83a23695-6bcb-4dbd-909d-0f7af9be2b25\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.087074 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.088514 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.123801 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.168801 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.168878 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.168961 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9vmn\" (UniqueName: \"kubernetes.io/projected/83a23695-6bcb-4dbd-909d-0f7af9be2b25-kube-api-access-m9vmn\") pod \"obo-prometheus-operator-7c8cf85677-z4jcm\" (UID: \"83a23695-6bcb-4dbd-909d-0f7af9be2b25\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.203300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9vmn\" (UniqueName: \"kubernetes.io/projected/83a23695-6bcb-4dbd-909d-0f7af9be2b25-kube-api-access-m9vmn\") pod \"obo-prometheus-operator-7c8cf85677-z4jcm\" (UID: 
\"83a23695-6bcb-4dbd-909d-0f7af9be2b25\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.214114 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.270913 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.271015 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.271064 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.271117 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.276300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.276904 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b7bf516-55f6-4da8-baed-a26d0a03dbae-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-f269d\" (UID: \"8b7bf516-55f6-4da8-baed-a26d0a03dbae\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.298755 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-7jnvg"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.312497 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.320985 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.321641 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-twhnv" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.323666 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-7jnvg"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.371924 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.373341 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.373391 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.381051 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.387303 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/57839bb1-8406-48c1-adf4-ed7dfe80723b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-778b46994f-cct5k\" (UID: \"57839bb1-8406-48c1-adf4-ed7dfe80723b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.421047 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.479745 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/157e1a24-f062-4148-af25-b1bda9a5ef03-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.479875 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvq5n\" (UniqueName: \"kubernetes.io/projected/157e1a24-f062-4148-af25-b1bda9a5ef03-kube-api-access-kvq5n\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.549001 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-trfrl"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.553352 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.560319 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-ztqms" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.563642 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-trfrl"] Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.581285 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvq5n\" (UniqueName: \"kubernetes.io/projected/157e1a24-f062-4148-af25-b1bda9a5ef03-kube-api-access-kvq5n\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.581406 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/157e1a24-f062-4148-af25-b1bda9a5ef03-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.591428 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/157e1a24-f062-4148-af25-b1bda9a5ef03-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.610360 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvq5n\" (UniqueName: \"kubernetes.io/projected/157e1a24-f062-4148-af25-b1bda9a5ef03-kube-api-access-kvq5n\") pod \"observability-operator-cc5f78dfc-7jnvg\" (UID: \"157e1a24-f062-4148-af25-b1bda9a5ef03\") " pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.672894 
5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.683781 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7lqq\" (UniqueName: \"kubernetes.io/projected/db565abb-fbeb-4cd1-9c93-673a81facb8c-kube-api-access-q7lqq\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.684252 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/db565abb-fbeb-4cd1-9c93-673a81facb8c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.786238 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/db565abb-fbeb-4cd1-9c93-673a81facb8c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.786349 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7lqq\" (UniqueName: \"kubernetes.io/projected/db565abb-fbeb-4cd1-9c93-673a81facb8c-kube-api-access-q7lqq\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.787588 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/db565abb-fbeb-4cd1-9c93-673a81facb8c-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.804851 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7lqq\" (UniqueName: \"kubernetes.io/projected/db565abb-fbeb-4cd1-9c93-673a81facb8c-kube-api-access-q7lqq\") pod \"perses-operator-54bc95c9fb-trfrl\" (UID: \"db565abb-fbeb-4cd1-9c93-673a81facb8c\") " pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:47 crc kubenswrapper[5081]: I1003 17:12:47.888293 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.044402 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm"] Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.144777 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" event={"ID":"83a23695-6bcb-4dbd-909d-0f7af9be2b25","Type":"ContainerStarted","Data":"daf23faf7b0061a3f9cbf183b3dbe633539a357b023542de349bdd0e9b74384c"} Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.170367 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k"] Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.202995 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d"] Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.388707 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-7jnvg"] Oct 03 17:12:48 crc kubenswrapper[5081]: W1003 17:12:48.549732 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb565abb_fbeb_4cd1_9c93_673a81facb8c.slice/crio-9cd87def0e80e43365994b74bc5f5b5f4c1b75c3ee92dc6a5f446f81846c696f WatchSource:0}: Error finding container 9cd87def0e80e43365994b74bc5f5b5f4c1b75c3ee92dc6a5f446f81846c696f: Status 404 returned error can't find the container with id 9cd87def0e80e43365994b74bc5f5b5f4c1b75c3ee92dc6a5f446f81846c696f Oct 03 17:12:48 crc kubenswrapper[5081]: I1003 17:12:48.559310 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-trfrl"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.048071 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4298-account-create-2g4vp"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.063625 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-2fd7-account-create-frv4k"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.106045 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4298-account-create-2g4vp"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.132617 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4098-account-create-tfpsk"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.156988 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4098-account-create-tfpsk"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.199090 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-2fd7-account-create-frv4k"] Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.203785 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" event={"ID":"57839bb1-8406-48c1-adf4-ed7dfe80723b","Type":"ContainerStarted","Data":"181455d28f789d314b70f0e2301f17ff97e6eb67859597ec29fa332e68797380"} Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.244778 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" 
event={"ID":"8b7bf516-55f6-4da8-baed-a26d0a03dbae","Type":"ContainerStarted","Data":"613517ba96f715433433cee03e3c22a563e931612ab6147996ed2baa73e8edb2"} Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.256576 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" event={"ID":"157e1a24-f062-4148-af25-b1bda9a5ef03","Type":"ContainerStarted","Data":"f5240677b0217dc8d4f018f3657a4fc4075c6820b5b165195815dbe77e43c928"} Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.272042 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" event={"ID":"db565abb-fbeb-4cd1-9c93-673a81facb8c","Type":"ContainerStarted","Data":"9cd87def0e80e43365994b74bc5f5b5f4c1b75c3ee92dc6a5f446f81846c696f"} Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.844419 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a478b2d-4691-4c07-a388-81448d8d15d6" path="/var/lib/kubelet/pods/2a478b2d-4691-4c07-a388-81448d8d15d6/volumes" Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.845195 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ded1a9c-6511-4384-8d65-c32ad512869f" path="/var/lib/kubelet/pods/4ded1a9c-6511-4384-8d65-c32ad512869f/volumes" Oct 03 17:12:49 crc kubenswrapper[5081]: I1003 17:12:49.845716 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f93c25e6-31cd-46d1-bfe1-ebffac538dc7" path="/var/lib/kubelet/pods/f93c25e6-31cd-46d1-bfe1-ebffac538dc7/volumes" Oct 03 17:12:52 crc kubenswrapper[5081]: I1003 17:12:52.318718 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" event={"ID":"8b7bf516-55f6-4da8-baed-a26d0a03dbae","Type":"ContainerStarted","Data":"fd00ae57eecd922be941c7047f1fb2fd2f6cd1b7a06edebe5acb0149c240631f"} Oct 03 17:12:52 crc kubenswrapper[5081]: I1003 17:12:52.320942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" event={"ID":"57839bb1-8406-48c1-adf4-ed7dfe80723b","Type":"ContainerStarted","Data":"b8a8db08a8fd7a6210c0b6632914048c19756951f24fadb1df7abd569b8bfc4b"} Oct 03 17:12:52 crc kubenswrapper[5081]: I1003 17:12:52.346315 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-f269d" podStartSLOduration=2.666468158 podStartE2EDuration="5.346291153s" podCreationTimestamp="2025-10-03 17:12:47 +0000 UTC" firstStartedPulling="2025-10-03 17:12:48.248652015 +0000 UTC m=+6287.214208628" lastFinishedPulling="2025-10-03 17:12:50.92847501 +0000 UTC m=+6289.894031623" observedRunningTime="2025-10-03 17:12:52.33886401 +0000 UTC m=+6291.304420643" watchObservedRunningTime="2025-10-03 17:12:52.346291153 +0000 UTC m=+6291.311847766" Oct 03 17:12:52 crc kubenswrapper[5081]: I1003 17:12:52.359528 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-778b46994f-cct5k" podStartSLOduration=2.6346092150000002 podStartE2EDuration="5.359508311s" podCreationTimestamp="2025-10-03 17:12:47 +0000 UTC" firstStartedPulling="2025-10-03 17:12:48.198656323 +0000 UTC m=+6287.164212936" lastFinishedPulling="2025-10-03 17:12:50.923555419 +0000 UTC m=+6289.889112032" observedRunningTime="2025-10-03 17:12:52.359084339 +0000 UTC m=+6291.324640962" 
watchObservedRunningTime="2025-10-03 17:12:52.359508311 +0000 UTC m=+6291.325064924" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.376731 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" event={"ID":"157e1a24-f062-4148-af25-b1bda9a5ef03","Type":"ContainerStarted","Data":"c83d34547480ee5ef4af4f8411b778160e4df75c4708c66ac7484cd27f8f7252"} Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.378086 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.379060 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.380279 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" event={"ID":"db565abb-fbeb-4cd1-9c93-673a81facb8c","Type":"ContainerStarted","Data":"6fdc7f8e0f19c025ddc3656f17b6fa05dd42fec0fbb37c0a1a49a52866af2c4b"} Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.380726 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.382605 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" event={"ID":"83a23695-6bcb-4dbd-909d-0f7af9be2b25","Type":"ContainerStarted","Data":"074910a476e81bc54fb72676243a5b84aa06c6bfa3b71870bd793516c9881ade"} Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.425257 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-7jnvg" podStartSLOduration=2.044844677 podStartE2EDuration="10.425236395s" podCreationTimestamp="2025-10-03 17:12:47 +0000 UTC" firstStartedPulling="2025-10-03 17:12:48.40072731 +0000 UTC m=+6287.366283923" lastFinishedPulling="2025-10-03 17:12:56.781119028 +0000 UTC m=+6295.746675641" observedRunningTime="2025-10-03 17:12:57.421064596 +0000 UTC m=+6296.386621219" watchObservedRunningTime="2025-10-03 17:12:57.425236395 +0000 UTC m=+6296.390793008" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.479712 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" podStartSLOduration=2.286372864 podStartE2EDuration="10.479690285s" podCreationTimestamp="2025-10-03 17:12:47 +0000 UTC" firstStartedPulling="2025-10-03 17:12:48.55296557 +0000 UTC m=+6287.518522183" lastFinishedPulling="2025-10-03 17:12:56.746282991 +0000 UTC m=+6295.711839604" observedRunningTime="2025-10-03 17:12:57.459464065 +0000 UTC m=+6296.425020698" watchObservedRunningTime="2025-10-03 17:12:57.479690285 +0000 UTC m=+6296.445246898" Oct 03 17:12:57 crc kubenswrapper[5081]: I1003 17:12:57.496572 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-z4jcm" podStartSLOduration=2.865103499 podStartE2EDuration="11.496540837s" podCreationTimestamp="2025-10-03 17:12:46 +0000 UTC" firstStartedPulling="2025-10-03 17:12:48.114191984 +0000 UTC m=+6287.079748597" lastFinishedPulling="2025-10-03 17:12:56.745629322 +0000 UTC m=+6295.711185935" observedRunningTime="2025-10-03 17:12:57.490679959 +0000 UTC m=+6296.456236572" 
watchObservedRunningTime="2025-10-03 17:12:57.496540837 +0000 UTC m=+6296.462097460" Oct 03 17:12:59 crc kubenswrapper[5081]: I1003 17:12:59.043306 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jzxch"] Oct 03 17:12:59 crc kubenswrapper[5081]: I1003 17:12:59.056859 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jzxch"] Oct 03 17:12:59 crc kubenswrapper[5081]: I1003 17:12:59.850216 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be88c3fe-132e-482b-9d6f-f7c72ef66228" path="/var/lib/kubelet/pods/be88c3fe-132e-482b-9d6f-f7c72ef66228/volumes" Oct 03 17:13:07 crc kubenswrapper[5081]: I1003 17:13:07.893611 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-trfrl" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.323858 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.324412 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" containerName="openstackclient" containerID="cri-o://ded69724ea946f0a2544294cc22d841f931373bd41ecdcb0c02078ae0a72bb62" gracePeriod=2 Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.337700 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.403637 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: E1003 17:13:10.404267 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" containerName="openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.404287 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" containerName="openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.404518 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" containerName="openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.435740 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.442357 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.455055 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.541921 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.541971 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrc5b\" (UniqueName: \"kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.542002 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.556836 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: E1003 17:13:10.571503 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-zrc5b openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.618935 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.650118 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.653538 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.656060 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrc5b\" (UniqueName: \"kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.656110 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.656393 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.657471 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: E1003 17:13:10.661908 5081 projected.go:194] Error preparing data for projected volume kube-api-access-zrc5b for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (23dd0d6c-5734-401c-8633-ce38a134b9aa) does not match the UID in record. The object might have been deleted and then recreated Oct 03 17:13:10 crc kubenswrapper[5081]: E1003 17:13:10.661969 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b podName:23dd0d6c-5734-401c-8633-ce38a134b9aa nodeName:}" failed. No retries permitted until 2025-10-03 17:13:11.161952429 +0000 UTC m=+6310.127509032 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-zrc5b" (UniqueName: "kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b") pod "openstackclient" (UID: "23dd0d6c-5734-401c-8633-ce38a134b9aa") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (23dd0d6c-5734-401c-8633-ce38a134b9aa) does not match the UID in record. 
The object might have been deleted and then recreated Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.663104 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.720250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.758146 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq776\" (UniqueName: \"kubernetes.io/projected/0abfafdf-c49b-4af3-99d0-772c7fb96392-kube-api-access-fq776\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.758260 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.758307 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config-secret\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.829226 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.830850 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.837261 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-qv6r8" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.860639 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config-secret\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.860861 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq776\" (UniqueName: \"kubernetes.io/projected/0abfafdf-c49b-4af3-99d0-772c7fb96392-kube-api-access-fq776\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.860926 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.861883 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.863596 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.867893 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0abfafdf-c49b-4af3-99d0-772c7fb96392-openstack-config-secret\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.931610 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq776\" (UniqueName: \"kubernetes.io/projected/0abfafdf-c49b-4af3-99d0-772c7fb96392-kube-api-access-fq776\") pod \"openstackclient\" (UID: \"0abfafdf-c49b-4af3-99d0-772c7fb96392\") " pod="openstack/openstackclient" Oct 03 17:13:10 crc kubenswrapper[5081]: I1003 17:13:10.963274 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm8f7\" (UniqueName: \"kubernetes.io/projected/e19c8b15-8fab-44b0-82d1-5929e7568034-kube-api-access-dm8f7\") pod \"kube-state-metrics-0\" (UID: \"e19c8b15-8fab-44b0-82d1-5929e7568034\") " pod="openstack/kube-state-metrics-0" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.006119 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.066724 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm8f7\" (UniqueName: \"kubernetes.io/projected/e19c8b15-8fab-44b0-82d1-5929e7568034-kube-api-access-dm8f7\") pod \"kube-state-metrics-0\" (UID: \"e19c8b15-8fab-44b0-82d1-5929e7568034\") " pod="openstack/kube-state-metrics-0" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.110373 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm8f7\" (UniqueName: \"kubernetes.io/projected/e19c8b15-8fab-44b0-82d1-5929e7568034-kube-api-access-dm8f7\") pod \"kube-state-metrics-0\" (UID: \"e19c8b15-8fab-44b0-82d1-5929e7568034\") " pod="openstack/kube-state-metrics-0" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.150949 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.171075 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrc5b\" (UniqueName: \"kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b\") pod \"openstackclient\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " pod="openstack/openstackclient" Oct 03 17:13:11 crc kubenswrapper[5081]: E1003 17:13:11.189889 5081 projected.go:194] Error preparing data for projected volume kube-api-access-zrc5b for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (23dd0d6c-5734-401c-8633-ce38a134b9aa) does not match the UID in record. The object might have been deleted and then recreated Oct 03 17:13:11 crc kubenswrapper[5081]: E1003 17:13:11.189971 5081 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b podName:23dd0d6c-5734-401c-8633-ce38a134b9aa nodeName:}" failed. No retries permitted until 2025-10-03 17:13:12.18994869 +0000 UTC m=+6311.155505303 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-zrc5b" (UniqueName: "kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b") pod "openstackclient" (UID: "23dd0d6c-5734-401c-8633-ce38a134b9aa") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (23dd0d6c-5734-401c-8633-ce38a134b9aa) does not match the UID in record. The object might have been deleted and then recreated Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.658607 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.680773 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.695779 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.703237 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.807593 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret\") pod \"23dd0d6c-5734-401c-8633-ce38a134b9aa\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.807705 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config\") pod \"23dd0d6c-5734-401c-8633-ce38a134b9aa\" (UID: \"23dd0d6c-5734-401c-8633-ce38a134b9aa\") " Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.808303 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrc5b\" (UniqueName: \"kubernetes.io/projected/23dd0d6c-5734-401c-8633-ce38a134b9aa-kube-api-access-zrc5b\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.808999 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "23dd0d6c-5734-401c-8633-ce38a134b9aa" (UID: "23dd0d6c-5734-401c-8633-ce38a134b9aa"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.815019 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "23dd0d6c-5734-401c-8633-ce38a134b9aa" (UID: "23dd0d6c-5734-401c-8633-ce38a134b9aa"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.912372 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.912424 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23dd0d6c-5734-401c-8633-ce38a134b9aa-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:11 crc kubenswrapper[5081]: I1003 17:13:11.913048 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" path="/var/lib/kubelet/pods/23dd0d6c-5734-401c-8633-ce38a134b9aa/volumes" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.008994 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.061493 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.065031 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.073078 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-cntd7" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.073299 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.073378 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.073459 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.108235 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.228904 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.228972 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.229012 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.229051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nng4g\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-kube-api-access-nng4g\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.229114 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.229166 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.310789 5081 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.334548 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.334717 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.334820 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.334911 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nng4g\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-kube-api-access-nng4g\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.335021 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.335156 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.337685 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.338003 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/0a09ecee-8e55-49a6-9849-b36d11700f3e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.348506 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " 
pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.349343 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/0a09ecee-8e55-49a6-9849-b36d11700f3e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.353380 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.371363 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nng4g\" (UniqueName: \"kubernetes.io/projected/0a09ecee-8e55-49a6-9849-b36d11700f3e-kube-api-access-nng4g\") pod \"alertmanager-metric-storage-0\" (UID: \"0a09ecee-8e55-49a6-9849-b36d11700f3e\") " pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.515196 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.617399 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.628084 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.645065 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.645278 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.645387 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.646085 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.646202 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.646378 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-hp8pv" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.660309 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.737097 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0abfafdf-c49b-4af3-99d0-772c7fb96392","Type":"ContainerStarted","Data":"1b94de4aee1f0dab37630fe37cb0da8cd11dac7417236c7a5b1cba0d6b74b448"} Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.740200 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"e19c8b15-8fab-44b0-82d1-5929e7568034","Type":"ContainerStarted","Data":"73a7e517c108292285b737edb416d817950225fcb45ecc1354f33b0157f2dbf1"} Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.747014 5081 generic.go:334] "Generic (PLEG): container finished" podID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" containerID="ded69724ea946f0a2544294cc22d841f931373bd41ecdcb0c02078ae0a72bb62" exitCode=137 Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.747156 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.752269 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758165 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758238 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758282 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/675a2924-f39b-4c35-9411-308db76e69aa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758303 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jpq5\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-kube-api-access-2jpq5\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758504 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758705 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/675a2924-f39b-4c35-9411-308db76e69aa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.758931 5081 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.759433 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.859749 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="23dd0d6c-5734-401c-8633-ce38a134b9aa" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873230 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873443 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/675a2924-f39b-4c35-9411-308db76e69aa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873476 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jpq5\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-kube-api-access-2jpq5\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873522 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873786 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/675a2924-f39b-4c35-9411-308db76e69aa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.873931 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.874196 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.880941 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/675a2924-f39b-4c35-9411-308db76e69aa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.889757 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/675a2924-f39b-4c35-9411-308db76e69aa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.890440 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.890474 5081 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.890503 5081 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/21fd78f26e9f89d27471cee9820390e659390c79ab228df25c37268b932c2e0f/globalmount\"" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.891250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-config\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.894931 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.921862 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jpq5\" (UniqueName: \"kubernetes.io/projected/675a2924-f39b-4c35-9411-308db76e69aa-kube-api-access-2jpq5\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:12 crc kubenswrapper[5081]: I1003 17:13:12.927455 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/675a2924-f39b-4c35-9411-308db76e69aa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.047445 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-82cb8550-2a83-4a0a-a187-1d09a122f839\") pod \"prometheus-metric-storage-0\" (UID: \"675a2924-f39b-4c35-9411-308db76e69aa\") " pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.161122 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.164264 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.293671 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.313578 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7n95j\" (UniqueName: \"kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j\") pod \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.313760 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret\") pod \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.313870 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config\") pod \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\" (UID: \"357afcbe-1c13-4929-a872-ca9d48a0d1cd\") " Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.343097 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j" (OuterVolumeSpecName: "kube-api-access-7n95j") pod "357afcbe-1c13-4929-a872-ca9d48a0d1cd" (UID: "357afcbe-1c13-4929-a872-ca9d48a0d1cd"). InnerVolumeSpecName "kube-api-access-7n95j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.430336 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7n95j\" (UniqueName: \"kubernetes.io/projected/357afcbe-1c13-4929-a872-ca9d48a0d1cd-kube-api-access-7n95j\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.431083 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.471033 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "357afcbe-1c13-4929-a872-ca9d48a0d1cd" (UID: "357afcbe-1c13-4929-a872-ca9d48a0d1cd"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.534444 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.550824 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "357afcbe-1c13-4929-a872-ca9d48a0d1cd" (UID: "357afcbe-1c13-4929-a872-ca9d48a0d1cd"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.636449 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/357afcbe-1c13-4929-a872-ca9d48a0d1cd-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.759268 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0abfafdf-c49b-4af3-99d0-772c7fb96392","Type":"ContainerStarted","Data":"9f7a32cf3249a811c471c50ff20457fb32eac10f38cbfb796c9d856804549790"} Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.761348 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"0a09ecee-8e55-49a6-9849-b36d11700f3e","Type":"ContainerStarted","Data":"175938d630a13b5f824c4b0eb61f83cd8b689754bf50d8993c7fe802969b8974"} Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.763159 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e19c8b15-8fab-44b0-82d1-5929e7568034","Type":"ContainerStarted","Data":"a1432c8d9f0f03fde1f7833ac209e95fa24779e5f71c882b23d782846ec00217"} Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.763216 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.764619 5081 scope.go:117] "RemoveContainer" containerID="ded69724ea946f0a2544294cc22d841f931373bd41ecdcb0c02078ae0a72bb62" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.764653 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.779800 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.783288 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.783185555 podStartE2EDuration="3.783185555s" podCreationTimestamp="2025-10-03 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:13:13.777003138 +0000 UTC m=+6312.742559751" watchObservedRunningTime="2025-10-03 17:13:13.783185555 +0000 UTC m=+6312.748742168" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.800401 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.041389882 podStartE2EDuration="3.800369427s" podCreationTimestamp="2025-10-03 17:13:10 +0000 UTC" firstStartedPulling="2025-10-03 17:13:12.322894116 +0000 UTC m=+6311.288450729" lastFinishedPulling="2025-10-03 17:13:13.081873661 +0000 UTC m=+6312.047430274" observedRunningTime="2025-10-03 17:13:13.797733842 +0000 UTC m=+6312.763290445" watchObservedRunningTime="2025-10-03 17:13:13.800369427 +0000 UTC m=+6312.765926040" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.802624 5081 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" podUID="0abfafdf-c49b-4af3-99d0-772c7fb96392" Oct 03 17:13:13 crc 
kubenswrapper[5081]: I1003 17:13:13.843139 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="357afcbe-1c13-4929-a872-ca9d48a0d1cd" path="/var/lib/kubelet/pods/357afcbe-1c13-4929-a872-ca9d48a0d1cd/volumes" Oct 03 17:13:13 crc kubenswrapper[5081]: I1003 17:13:13.924606 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 03 17:13:13 crc kubenswrapper[5081]: W1003 17:13:13.927998 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod675a2924_f39b_4c35_9411_308db76e69aa.slice/crio-4115d7efe9dd5907c21a2e3f837b02b58ae444a3083f1cc167e286968398f2a8 WatchSource:0}: Error finding container 4115d7efe9dd5907c21a2e3f837b02b58ae444a3083f1cc167e286968398f2a8: Status 404 returned error can't find the container with id 4115d7efe9dd5907c21a2e3f837b02b58ae444a3083f1cc167e286968398f2a8 Oct 03 17:13:14 crc kubenswrapper[5081]: I1003 17:13:14.784916 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerStarted","Data":"4115d7efe9dd5907c21a2e3f837b02b58ae444a3083f1cc167e286968398f2a8"} Oct 03 17:13:17 crc kubenswrapper[5081]: I1003 17:13:17.031457 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r4922"] Oct 03 17:13:17 crc kubenswrapper[5081]: I1003 17:13:17.042074 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r4922"] Oct 03 17:13:17 crc kubenswrapper[5081]: I1003 17:13:17.861348 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16d3f66b-7d30-4e2e-a731-bb11a58a3cf6" path="/var/lib/kubelet/pods/16d3f66b-7d30-4e2e-a731-bb11a58a3cf6/volumes" Oct 03 17:13:18 crc kubenswrapper[5081]: I1003 17:13:18.033255 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-78n8t"] Oct 03 17:13:18 crc kubenswrapper[5081]: I1003 17:13:18.044925 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-78n8t"] Oct 03 17:13:19 crc kubenswrapper[5081]: I1003 17:13:19.840071 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1802be7f-0d59-41b3-8e2d-ec4abf6a221a" path="/var/lib/kubelet/pods/1802be7f-0d59-41b3-8e2d-ec4abf6a221a/volumes" Oct 03 17:13:19 crc kubenswrapper[5081]: I1003 17:13:19.867300 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"0a09ecee-8e55-49a6-9849-b36d11700f3e","Type":"ContainerStarted","Data":"bc108c612e265f61e3cffddbe15c8bb5c923d251919282a8ec1e9921b62f41e4"} Oct 03 17:13:19 crc kubenswrapper[5081]: I1003 17:13:19.868797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerStarted","Data":"6d8c4ea454dc6626fa2e67ece9c5dd25a167f69fd1a877ef77aae5e9046de9b5"} Oct 03 17:13:21 crc kubenswrapper[5081]: I1003 17:13:21.157300 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 17:13:24 crc kubenswrapper[5081]: I1003 17:13:24.971967 5081 scope.go:117] "RemoveContainer" containerID="934fae996577b5e325bc12e7ea71af5517c95424d14cdc78d3d2efec358c1451" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.021228 5081 scope.go:117] "RemoveContainer" 
containerID="aa0ea7d9b2f2e300ca1223288b7b738553819976dfe93b1697e97b39f6e53190" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.056709 5081 scope.go:117] "RemoveContainer" containerID="412d0795d8122ee3ff918dabc055d9da6a6d12bace8257bcb20797875dc4bf3e" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.116612 5081 scope.go:117] "RemoveContainer" containerID="b3364c3692145944bdf73ffbf6be822036d712c4aefbe17ac4872c54b8e0f510" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.174262 5081 scope.go:117] "RemoveContainer" containerID="d244b260857b05e96f73a2f3b782fffecdd493d9c2191fa12e62c333aaeba24b" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.265510 5081 scope.go:117] "RemoveContainer" containerID="bc980de3d642c634828274f02124571b8a21dabd9bb7c5aeb8ca18fc4d50b7ef" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.289028 5081 scope.go:117] "RemoveContainer" containerID="16a94ee498ae34e04f01b07a81641612b16b270ae4b5d071a0eb4b8de678cc5d" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.316755 5081 scope.go:117] "RemoveContainer" containerID="52ca2cf3864549b0fa6d1ef8a3c2cdee015fef36d3283a6344bf6e0b550c9f6d" Oct 03 17:13:25 crc kubenswrapper[5081]: I1003 17:13:25.359541 5081 scope.go:117] "RemoveContainer" containerID="cdeb5b6f69169e55ef27eec62dcc5401a32ebdee0235b33924d9cb0f8742cdff" Oct 03 17:13:26 crc kubenswrapper[5081]: I1003 17:13:26.942176 5081 generic.go:334] "Generic (PLEG): container finished" podID="0a09ecee-8e55-49a6-9849-b36d11700f3e" containerID="bc108c612e265f61e3cffddbe15c8bb5c923d251919282a8ec1e9921b62f41e4" exitCode=0 Oct 03 17:13:26 crc kubenswrapper[5081]: I1003 17:13:26.942270 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"0a09ecee-8e55-49a6-9849-b36d11700f3e","Type":"ContainerDied","Data":"bc108c612e265f61e3cffddbe15c8bb5c923d251919282a8ec1e9921b62f41e4"} Oct 03 17:13:26 crc kubenswrapper[5081]: I1003 17:13:26.944239 5081 generic.go:334] "Generic (PLEG): container finished" podID="675a2924-f39b-4c35-9411-308db76e69aa" containerID="6d8c4ea454dc6626fa2e67ece9c5dd25a167f69fd1a877ef77aae5e9046de9b5" exitCode=0 Oct 03 17:13:26 crc kubenswrapper[5081]: I1003 17:13:26.944282 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerDied","Data":"6d8c4ea454dc6626fa2e67ece9c5dd25a167f69fd1a877ef77aae5e9046de9b5"} Oct 03 17:13:29 crc kubenswrapper[5081]: I1003 17:13:29.982095 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"0a09ecee-8e55-49a6-9849-b36d11700f3e","Type":"ContainerStarted","Data":"6225ae2ef466fb65acd66095cf2be0032cb1374d0449159dcbafbac8c5b4a2bb"} Oct 03 17:13:30 crc kubenswrapper[5081]: I1003 17:13:30.647642 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:13:30 crc kubenswrapper[5081]: I1003 17:13:30.648012 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:13:31 crc 
kubenswrapper[5081]: I1003 17:13:31.042410 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-4dpj8"] Oct 03 17:13:31 crc kubenswrapper[5081]: I1003 17:13:31.056712 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-4dpj8"] Oct 03 17:13:31 crc kubenswrapper[5081]: I1003 17:13:31.846538 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ebfdb3-5562-4e4b-9b3e-aa3007ae592c" path="/var/lib/kubelet/pods/72ebfdb3-5562-4e4b-9b3e-aa3007ae592c/volumes" Oct 03 17:13:35 crc kubenswrapper[5081]: I1003 17:13:35.029871 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"0a09ecee-8e55-49a6-9849-b36d11700f3e","Type":"ContainerStarted","Data":"756a93ca775ca033fc5b92f48858a0e0301b9943e1179b1f5f260259020c9e90"} Oct 03 17:13:35 crc kubenswrapper[5081]: I1003 17:13:35.032512 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerStarted","Data":"c60cf57b1773096d6e7811ac47c6bb6fe33ebd34fee15d0d229360b69f24ac67"} Oct 03 17:13:35 crc kubenswrapper[5081]: I1003 17:13:35.032551 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:35 crc kubenswrapper[5081]: I1003 17:13:35.033098 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Oct 03 17:13:35 crc kubenswrapper[5081]: I1003 17:13:35.053503 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=8.147940744 podStartE2EDuration="24.053487168s" podCreationTimestamp="2025-10-03 17:13:11 +0000 UTC" firstStartedPulling="2025-10-03 17:13:13.517534938 +0000 UTC m=+6312.483091551" lastFinishedPulling="2025-10-03 17:13:29.423081362 +0000 UTC m=+6328.388637975" observedRunningTime="2025-10-03 17:13:35.050176743 +0000 UTC m=+6334.015733366" watchObservedRunningTime="2025-10-03 17:13:35.053487168 +0000 UTC m=+6334.019043781" Oct 03 17:13:38 crc kubenswrapper[5081]: I1003 17:13:38.068963 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerStarted","Data":"d6c93ec6db7b4230a5854ab0388acce3236354e79ad161220712cd2f9c760049"} Oct 03 17:13:41 crc kubenswrapper[5081]: I1003 17:13:41.099951 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"675a2924-f39b-4c35-9411-308db76e69aa","Type":"ContainerStarted","Data":"291f59e95c454131e1de838d04b3e721b38e3bc6a8f0def960861ebc1c6aaf41"} Oct 03 17:13:41 crc kubenswrapper[5081]: I1003 17:13:41.124213 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.402915262 podStartE2EDuration="30.124189841s" podCreationTimestamp="2025-10-03 17:13:11 +0000 UTC" firstStartedPulling="2025-10-03 17:13:13.93107599 +0000 UTC m=+6312.896632593" lastFinishedPulling="2025-10-03 17:13:40.652350559 +0000 UTC m=+6339.617907172" observedRunningTime="2025-10-03 17:13:41.120459824 +0000 UTC m=+6340.086016457" watchObservedRunningTime="2025-10-03 17:13:41.124189841 +0000 UTC m=+6340.089746454" Oct 03 17:13:43 crc kubenswrapper[5081]: I1003 17:13:43.294630 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:43 crc kubenswrapper[5081]: I1003 17:13:43.295108 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:43 crc kubenswrapper[5081]: I1003 17:13:43.297062 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:44 crc kubenswrapper[5081]: I1003 17:13:44.126607 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.566553 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.570106 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.578376 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.578704 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.581678 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.675203 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5844\" (UniqueName: \"kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.675767 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.676060 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.676277 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.676551 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.676761 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts\") pod \"ceilometer-0\" 
(UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.676940 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.779648 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5844\" (UniqueName: \"kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.779996 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.780079 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.780339 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.780402 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.780521 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.780578 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.781058 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.781517 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd\") pod \"ceilometer-0\" (UID: 
\"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.786797 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.786948 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.787665 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.793912 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.815544 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5844\" (UniqueName: \"kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844\") pod \"ceilometer-0\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " pod="openstack/ceilometer-0" Oct 03 17:13:45 crc kubenswrapper[5081]: I1003 17:13:45.902143 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:13:46 crc kubenswrapper[5081]: I1003 17:13:46.427676 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:13:47 crc kubenswrapper[5081]: I1003 17:13:47.181393 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerStarted","Data":"a21862056c5e1e0ab8ff96fa536b829f33bbadc0c9bbdd0e866714ea94171038"} Oct 03 17:13:48 crc kubenswrapper[5081]: I1003 17:13:48.192357 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerStarted","Data":"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698"} Oct 03 17:13:48 crc kubenswrapper[5081]: I1003 17:13:48.192976 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerStarted","Data":"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f"} Oct 03 17:13:49 crc kubenswrapper[5081]: I1003 17:13:49.202728 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerStarted","Data":"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b"} Oct 03 17:13:51 crc kubenswrapper[5081]: I1003 17:13:51.224360 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerStarted","Data":"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5"} Oct 03 17:13:51 crc kubenswrapper[5081]: I1003 17:13:51.226507 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 17:13:51 crc kubenswrapper[5081]: I1003 17:13:51.255761 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.60394717 podStartE2EDuration="6.25573982s" podCreationTimestamp="2025-10-03 17:13:45 +0000 UTC" firstStartedPulling="2025-10-03 17:13:46.429522646 +0000 UTC m=+6345.395079259" lastFinishedPulling="2025-10-03 17:13:50.081315296 +0000 UTC m=+6349.046871909" observedRunningTime="2025-10-03 17:13:51.242441699 +0000 UTC m=+6350.207998312" watchObservedRunningTime="2025-10-03 17:13:51.25573982 +0000 UTC m=+6350.221296433" Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.758222 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-mhtmp"] Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.760775 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.770358 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-mhtmp"] Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.837026 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scv4f\" (UniqueName: \"kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f\") pod \"aodh-db-create-mhtmp\" (UID: \"dc9afd6e-cad6-434a-ab56-1335ac3964e4\") " pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.939692 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scv4f\" (UniqueName: \"kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f\") pod \"aodh-db-create-mhtmp\" (UID: \"dc9afd6e-cad6-434a-ab56-1335ac3964e4\") " pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:55 crc kubenswrapper[5081]: I1003 17:13:55.959715 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scv4f\" (UniqueName: \"kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f\") pod \"aodh-db-create-mhtmp\" (UID: \"dc9afd6e-cad6-434a-ab56-1335ac3964e4\") " pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:56 crc kubenswrapper[5081]: I1003 17:13:56.090911 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:56 crc kubenswrapper[5081]: I1003 17:13:56.635960 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-mhtmp"] Oct 03 17:13:57 crc kubenswrapper[5081]: I1003 17:13:57.309422 5081 generic.go:334] "Generic (PLEG): container finished" podID="dc9afd6e-cad6-434a-ab56-1335ac3964e4" containerID="568803f51cef577818e0553d05beec547239bcc4bb431e1807b1f7a1cab33c15" exitCode=0 Oct 03 17:13:57 crc kubenswrapper[5081]: I1003 17:13:57.309510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-mhtmp" event={"ID":"dc9afd6e-cad6-434a-ab56-1335ac3964e4","Type":"ContainerDied","Data":"568803f51cef577818e0553d05beec547239bcc4bb431e1807b1f7a1cab33c15"} Oct 03 17:13:57 crc kubenswrapper[5081]: I1003 17:13:57.309803 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-mhtmp" event={"ID":"dc9afd6e-cad6-434a-ab56-1335ac3964e4","Type":"ContainerStarted","Data":"a1d9c7e357bcdeff7f7b7d3c49905f2d68222bf9af0e7bd3d5abec1034f210f9"} Oct 03 17:13:58 crc kubenswrapper[5081]: I1003 17:13:58.751826 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-mhtmp" Oct 03 17:13:58 crc kubenswrapper[5081]: I1003 17:13:58.916453 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scv4f\" (UniqueName: \"kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f\") pod \"dc9afd6e-cad6-434a-ab56-1335ac3964e4\" (UID: \"dc9afd6e-cad6-434a-ab56-1335ac3964e4\") " Oct 03 17:13:58 crc kubenswrapper[5081]: I1003 17:13:58.922414 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f" (OuterVolumeSpecName: "kube-api-access-scv4f") pod "dc9afd6e-cad6-434a-ab56-1335ac3964e4" (UID: "dc9afd6e-cad6-434a-ab56-1335ac3964e4"). InnerVolumeSpecName "kube-api-access-scv4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:13:59 crc kubenswrapper[5081]: I1003 17:13:59.019055 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scv4f\" (UniqueName: \"kubernetes.io/projected/dc9afd6e-cad6-434a-ab56-1335ac3964e4-kube-api-access-scv4f\") on node \"crc\" DevicePath \"\"" Oct 03 17:13:59 crc kubenswrapper[5081]: I1003 17:13:59.336545 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-mhtmp" event={"ID":"dc9afd6e-cad6-434a-ab56-1335ac3964e4","Type":"ContainerDied","Data":"a1d9c7e357bcdeff7f7b7d3c49905f2d68222bf9af0e7bd3d5abec1034f210f9"} Oct 03 17:13:59 crc kubenswrapper[5081]: I1003 17:13:59.336608 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1d9c7e357bcdeff7f7b7d3c49905f2d68222bf9af0e7bd3d5abec1034f210f9" Oct 03 17:13:59 crc kubenswrapper[5081]: I1003 17:13:59.336673 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-mhtmp" Oct 03 17:14:00 crc kubenswrapper[5081]: I1003 17:14:00.647180 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:14:00 crc kubenswrapper[5081]: I1003 17:14:00.647496 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.963105 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-5655-account-create-cxlsl"] Oct 03 17:14:05 crc kubenswrapper[5081]: E1003 17:14:05.964184 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9afd6e-cad6-434a-ab56-1335ac3964e4" containerName="mariadb-database-create" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.964198 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9afd6e-cad6-434a-ab56-1335ac3964e4" containerName="mariadb-database-create" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.964468 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9afd6e-cad6-434a-ab56-1335ac3964e4" containerName="mariadb-database-create" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.965424 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.967700 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Oct 03 17:14:05 crc kubenswrapper[5081]: I1003 17:14:05.972252 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5655-account-create-cxlsl"] Oct 03 17:14:06 crc kubenswrapper[5081]: I1003 17:14:06.072022 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7thww\" (UniqueName: \"kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww\") pod \"aodh-5655-account-create-cxlsl\" (UID: \"6eb9e7af-18ff-45e4-a6ce-12ebc838807b\") " pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:06 crc kubenswrapper[5081]: I1003 17:14:06.173919 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7thww\" (UniqueName: \"kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww\") pod \"aodh-5655-account-create-cxlsl\" (UID: \"6eb9e7af-18ff-45e4-a6ce-12ebc838807b\") " pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:06 crc kubenswrapper[5081]: I1003 17:14:06.197221 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7thww\" (UniqueName: \"kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww\") pod \"aodh-5655-account-create-cxlsl\" (UID: \"6eb9e7af-18ff-45e4-a6ce-12ebc838807b\") " pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:06 crc kubenswrapper[5081]: I1003 17:14:06.295515 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:06 crc kubenswrapper[5081]: I1003 17:14:06.783703 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5655-account-create-cxlsl"] Oct 03 17:14:07 crc kubenswrapper[5081]: I1003 17:14:07.433164 5081 generic.go:334] "Generic (PLEG): container finished" podID="6eb9e7af-18ff-45e4-a6ce-12ebc838807b" containerID="30d125323fb8a2991ac92c59b5225a4c1d82aa58b5b1f738e1633b92bc60faf3" exitCode=0 Oct 03 17:14:07 crc kubenswrapper[5081]: I1003 17:14:07.433215 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5655-account-create-cxlsl" event={"ID":"6eb9e7af-18ff-45e4-a6ce-12ebc838807b","Type":"ContainerDied","Data":"30d125323fb8a2991ac92c59b5225a4c1d82aa58b5b1f738e1633b92bc60faf3"} Oct 03 17:14:07 crc kubenswrapper[5081]: I1003 17:14:07.433241 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5655-account-create-cxlsl" event={"ID":"6eb9e7af-18ff-45e4-a6ce-12ebc838807b","Type":"ContainerStarted","Data":"b21ab26e231b977f165da7ff5adb88b5778aa5763b21d561a256c2aaec58f582"} Oct 03 17:14:08 crc kubenswrapper[5081]: I1003 17:14:08.896319 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.037354 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7thww\" (UniqueName: \"kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww\") pod \"6eb9e7af-18ff-45e4-a6ce-12ebc838807b\" (UID: \"6eb9e7af-18ff-45e4-a6ce-12ebc838807b\") " Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.048307 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww" (OuterVolumeSpecName: "kube-api-access-7thww") pod "6eb9e7af-18ff-45e4-a6ce-12ebc838807b" (UID: "6eb9e7af-18ff-45e4-a6ce-12ebc838807b"). InnerVolumeSpecName "kube-api-access-7thww". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.140832 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7thww\" (UniqueName: \"kubernetes.io/projected/6eb9e7af-18ff-45e4-a6ce-12ebc838807b-kube-api-access-7thww\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.450884 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5655-account-create-cxlsl" event={"ID":"6eb9e7af-18ff-45e4-a6ce-12ebc838807b","Type":"ContainerDied","Data":"b21ab26e231b977f165da7ff5adb88b5778aa5763b21d561a256c2aaec58f582"} Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.450926 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b21ab26e231b977f165da7ff5adb88b5778aa5763b21d561a256c2aaec58f582" Oct 03 17:14:09 crc kubenswrapper[5081]: I1003 17:14:09.450944 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5655-account-create-cxlsl" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.494306 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-kdl8p"] Oct 03 17:14:11 crc kubenswrapper[5081]: E1003 17:14:11.496639 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb9e7af-18ff-45e4-a6ce-12ebc838807b" containerName="mariadb-account-create" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.496695 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb9e7af-18ff-45e4-a6ce-12ebc838807b" containerName="mariadb-account-create" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.497324 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb9e7af-18ff-45e4-a6ce-12ebc838807b" containerName="mariadb-account-create" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.499147 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.506212 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.506353 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.506695 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-smgk2" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.532114 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-kdl8p"] Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.590165 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.590243 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.590266 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.590297 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqw5d\" (UniqueName: \"kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.692366 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.692683 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.692825 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.692946 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqw5d\" (UniqueName: 
\"kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.698176 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.706202 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.706557 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.715005 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqw5d\" (UniqueName: \"kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d\") pod \"aodh-db-sync-kdl8p\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:11 crc kubenswrapper[5081]: I1003 17:14:11.830226 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:12 crc kubenswrapper[5081]: I1003 17:14:12.385958 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-kdl8p"] Oct 03 17:14:12 crc kubenswrapper[5081]: W1003 17:14:12.392833 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6259e68_df51_461f_b4bf_43269250f8aa.slice/crio-f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb WatchSource:0}: Error finding container f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb: Status 404 returned error can't find the container with id f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb Oct 03 17:14:12 crc kubenswrapper[5081]: I1003 17:14:12.487045 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-kdl8p" event={"ID":"a6259e68-df51-461f-b4bf-43269250f8aa","Type":"ContainerStarted","Data":"f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb"} Oct 03 17:14:15 crc kubenswrapper[5081]: I1003 17:14:15.036647 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lhfpn"] Oct 03 17:14:15 crc kubenswrapper[5081]: I1003 17:14:15.049355 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lhfpn"] Oct 03 17:14:15 crc kubenswrapper[5081]: I1003 17:14:15.843088 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3b37e5d-f3e7-4362-a5fe-84df1d174d7d" path="/var/lib/kubelet/pods/e3b37e5d-f3e7-4362-a5fe-84df1d174d7d/volumes" Oct 03 17:14:15 crc kubenswrapper[5081]: I1003 17:14:15.932634 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 
17:14:17 crc kubenswrapper[5081]: I1003 17:14:17.541670 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-kdl8p" event={"ID":"a6259e68-df51-461f-b4bf-43269250f8aa","Type":"ContainerStarted","Data":"4e135bf1c50a68855dea7373ad768bfb53d71f35c3795280490c6c398241013d"} Oct 03 17:14:17 crc kubenswrapper[5081]: I1003 17:14:17.562879 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-kdl8p" podStartSLOduration=2.704912153 podStartE2EDuration="6.562860907s" podCreationTimestamp="2025-10-03 17:14:11 +0000 UTC" firstStartedPulling="2025-10-03 17:14:12.39819248 +0000 UTC m=+6371.363749093" lastFinishedPulling="2025-10-03 17:14:16.256141234 +0000 UTC m=+6375.221697847" observedRunningTime="2025-10-03 17:14:17.560441257 +0000 UTC m=+6376.525997890" watchObservedRunningTime="2025-10-03 17:14:17.562860907 +0000 UTC m=+6376.528417510" Oct 03 17:14:19 crc kubenswrapper[5081]: I1003 17:14:19.580937 5081 generic.go:334] "Generic (PLEG): container finished" podID="a6259e68-df51-461f-b4bf-43269250f8aa" containerID="4e135bf1c50a68855dea7373ad768bfb53d71f35c3795280490c6c398241013d" exitCode=0 Oct 03 17:14:19 crc kubenswrapper[5081]: I1003 17:14:19.581483 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-kdl8p" event={"ID":"a6259e68-df51-461f-b4bf-43269250f8aa","Type":"ContainerDied","Data":"4e135bf1c50a68855dea7373ad768bfb53d71f35c3795280490c6c398241013d"} Oct 03 17:14:20 crc kubenswrapper[5081]: I1003 17:14:20.914183 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.036698 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle\") pod \"a6259e68-df51-461f-b4bf-43269250f8aa\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.036795 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data\") pod \"a6259e68-df51-461f-b4bf-43269250f8aa\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.037084 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqw5d\" (UniqueName: \"kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d\") pod \"a6259e68-df51-461f-b4bf-43269250f8aa\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.037126 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts\") pod \"a6259e68-df51-461f-b4bf-43269250f8aa\" (UID: \"a6259e68-df51-461f-b4bf-43269250f8aa\") " Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.046069 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d" (OuterVolumeSpecName: "kube-api-access-qqw5d") pod "a6259e68-df51-461f-b4bf-43269250f8aa" (UID: "a6259e68-df51-461f-b4bf-43269250f8aa"). InnerVolumeSpecName "kube-api-access-qqw5d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.048185 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts" (OuterVolumeSpecName: "scripts") pod "a6259e68-df51-461f-b4bf-43269250f8aa" (UID: "a6259e68-df51-461f-b4bf-43269250f8aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.075327 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6259e68-df51-461f-b4bf-43269250f8aa" (UID: "a6259e68-df51-461f-b4bf-43269250f8aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.075535 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data" (OuterVolumeSpecName: "config-data") pod "a6259e68-df51-461f-b4bf-43269250f8aa" (UID: "a6259e68-df51-461f-b4bf-43269250f8aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.140104 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqw5d\" (UniqueName: \"kubernetes.io/projected/a6259e68-df51-461f-b4bf-43269250f8aa-kube-api-access-qqw5d\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.140153 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.140162 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.140170 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6259e68-df51-461f-b4bf-43269250f8aa-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.598669 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-kdl8p" event={"ID":"a6259e68-df51-461f-b4bf-43269250f8aa","Type":"ContainerDied","Data":"f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb"} Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.598707 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0badcaafe24ad8a94cc4191a8467e82b756a418c9a87a11bc7e0e7399c277eb" Oct 03 17:14:21 crc kubenswrapper[5081]: I1003 17:14:21.598757 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-kdl8p" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.055603 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3601-account-create-xdcn7"] Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.069427 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3601-account-create-xdcn7"] Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.613161 5081 scope.go:117] "RemoveContainer" containerID="8b39383cc60a98336c0e21512c1eb662697ed32d290d84bbc73f8745c2b75599" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.654926 5081 scope.go:117] "RemoveContainer" containerID="15853a92d96f672769e2201a37ae361e4f8365b01ec8448a672e59bb4e7f2383" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.724672 5081 scope.go:117] "RemoveContainer" containerID="3fec7416e4463a5308623f6c7c7357fd93a83a3e93fb8c7bd0aae18cf4ca503c" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.889437 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78df042a-965a-4611-9d0a-194098083135" path="/var/lib/kubelet/pods/78df042a-965a-4611-9d0a-194098083135/volumes" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.890243 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Oct 03 17:14:25 crc kubenswrapper[5081]: E1003 17:14:25.890580 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6259e68-df51-461f-b4bf-43269250f8aa" containerName="aodh-db-sync" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.890592 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6259e68-df51-461f-b4bf-43269250f8aa" containerName="aodh-db-sync" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.890823 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6259e68-df51-461f-b4bf-43269250f8aa" containerName="aodh-db-sync" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.898540 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.898668 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.901732 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.903064 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-smgk2" Oct 03 17:14:25 crc kubenswrapper[5081]: I1003 17:14:25.903297 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.057542 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-combined-ca-bundle\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.057930 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-config-data\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.057999 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-scripts\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.058052 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z8b8\" (UniqueName: \"kubernetes.io/projected/225c5210-2413-4098-8d82-9907db3aad43-kube-api-access-9z8b8\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.160206 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-combined-ca-bundle\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.160272 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-config-data\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.160356 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-scripts\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.160439 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z8b8\" (UniqueName: \"kubernetes.io/projected/225c5210-2413-4098-8d82-9907db3aad43-kube-api-access-9z8b8\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.167707 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-combined-ca-bundle\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.168032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-config-data\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.177414 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/225c5210-2413-4098-8d82-9907db3aad43-scripts\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.178146 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z8b8\" (UniqueName: \"kubernetes.io/projected/225c5210-2413-4098-8d82-9907db3aad43-kube-api-access-9z8b8\") pod \"aodh-0\" (UID: \"225c5210-2413-4098-8d82-9907db3aad43\") " pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.224333 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Oct 03 17:14:26 crc kubenswrapper[5081]: I1003 17:14:26.731350 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Oct 03 17:14:27 crc kubenswrapper[5081]: I1003 17:14:27.668367 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"225c5210-2413-4098-8d82-9907db3aad43","Type":"ContainerStarted","Data":"a19b049c35df235ef659e3af2de3e2d62b296be7da27216a89e27dbc335986b9"} Oct 03 17:14:27 crc kubenswrapper[5081]: I1003 17:14:27.668793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"225c5210-2413-4098-8d82-9907db3aad43","Type":"ContainerStarted","Data":"8b229d5dfb88f53283cdce53d14401de5a3a7ce095da63c556c6cb34285c42e2"} Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.349379 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.349737 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-central-agent" containerID="cri-o://9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f" gracePeriod=30 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.349805 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="sg-core" containerID="cri-o://4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b" gracePeriod=30 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.349813 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="proxy-httpd" containerID="cri-o://9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5" gracePeriod=30 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.349861 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-notification-agent" 
containerID="cri-o://4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698" gracePeriod=30 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.680068 5081 generic.go:334] "Generic (PLEG): container finished" podID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerID="9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5" exitCode=0 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.680367 5081 generic.go:334] "Generic (PLEG): container finished" podID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerID="4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b" exitCode=2 Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.680220 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerDied","Data":"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5"} Oct 03 17:14:28 crc kubenswrapper[5081]: I1003 17:14:28.680403 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerDied","Data":"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b"} Oct 03 17:14:29 crc kubenswrapper[5081]: I1003 17:14:29.692493 5081 generic.go:334] "Generic (PLEG): container finished" podID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerID="9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f" exitCode=0 Oct 03 17:14:29 crc kubenswrapper[5081]: I1003 17:14:29.692552 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerDied","Data":"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f"} Oct 03 17:14:29 crc kubenswrapper[5081]: I1003 17:14:29.694942 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"225c5210-2413-4098-8d82-9907db3aad43","Type":"ContainerStarted","Data":"3eab98b37bdf2986242fb201fe4d7bb2ea61726b190b77324f3c043f81d0ce48"} Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.647667 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.647964 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.648004 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.648785 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.648840 5081 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" gracePeriod=600 Oct 03 17:14:30 crc kubenswrapper[5081]: I1003 17:14:30.710356 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"225c5210-2413-4098-8d82-9907db3aad43","Type":"ContainerStarted","Data":"6ad2570ed5530f906ba6ea15c130964b7d2ad4c0b7b54c9d30c9a89dc9cb9422"} Oct 03 17:14:30 crc kubenswrapper[5081]: E1003 17:14:30.776647 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.252500 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.387717 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.387829 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5844\" (UniqueName: \"kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.387896 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.387994 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388030 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388147 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388225 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle\") pod \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\" (UID: \"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef\") " Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388722 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388939 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.388939 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.397913 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844" (OuterVolumeSpecName: "kube-api-access-l5844") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "kube-api-access-l5844". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.399811 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts" (OuterVolumeSpecName: "scripts") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.418824 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.491353 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.491552 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.491672 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5844\" (UniqueName: \"kubernetes.io/projected/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-kube-api-access-l5844\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.491761 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.502940 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.520386 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data" (OuterVolumeSpecName: "config-data") pod "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" (UID: "ba19a5a8-4c4b-4707-b35d-eab595b1b6ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.594467 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.594503 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.722281 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" exitCode=0 Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.722479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"} Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.723018 5081 scope.go:117] "RemoveContainer" containerID="ba3c449f9ba6ce0a07d4ade825ef0ef780d0fc3b5448da6b455aae74bb2d7b4b" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.723926 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.724354 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.733168 5081 generic.go:334] "Generic (PLEG): container finished" podID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerID="4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698" exitCode=0 Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.733213 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerDied","Data":"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698"} Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.733244 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ba19a5a8-4c4b-4707-b35d-eab595b1b6ef","Type":"ContainerDied","Data":"a21862056c5e1e0ab8ff96fa536b829f33bbadc0c9bbdd0e866714ea94171038"} Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.733344 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.756999 5081 scope.go:117] "RemoveContainer" containerID="9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.782749 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.793958 5081 scope.go:117] "RemoveContainer" containerID="4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.804380 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.819647 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.820121 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="sg-core" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820136 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="sg-core" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.820153 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-notification-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820160 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-notification-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.820191 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="proxy-httpd" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820201 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="proxy-httpd" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.820214 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-central-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820219 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-central-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820423 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="proxy-httpd" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820438 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-central-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820452 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="ceilometer-notification-agent" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.820469 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" containerName="sg-core" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.822413 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.830081 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.833825 5081 scope.go:117] "RemoveContainer" containerID="4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.834089 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.834231 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.865378 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba19a5a8-4c4b-4707-b35d-eab595b1b6ef" path="/var/lib/kubelet/pods/ba19a5a8-4c4b-4707-b35d-eab595b1b6ef/volumes" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.893709 5081 scope.go:117] "RemoveContainer" containerID="9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904335 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904473 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mckrw\" (UniqueName: \"kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904578 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904628 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904652 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904728 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.904926 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.919457 5081 scope.go:117] "RemoveContainer" containerID="9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.920104 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5\": container with ID starting with 9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5 not found: ID does not exist" containerID="9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920141 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5"} err="failed to get container status \"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5\": rpc error: code = NotFound desc = could not find container \"9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5\": container with ID starting with 9acb864d38f00275442a8d8f5ae25342e39f15a9f2458c81c11b47e2396083c5 not found: ID does not exist" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920165 5081 scope.go:117] "RemoveContainer" containerID="4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.920498 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b\": container with ID starting with 4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b not found: ID does not exist" containerID="4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920526 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b"} err="failed to get container status \"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b\": rpc error: code = NotFound desc = could not find container \"4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b\": container with ID starting with 4f52d9ab34963ccd645473b6ed234ad47aac793951e2b2643d38f7f636ba786b not found: ID does not exist" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920541 5081 scope.go:117] "RemoveContainer" containerID="4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.920801 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698\": container with ID starting with 4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698 not found: ID does not exist" containerID="4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920832 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698"} 
err="failed to get container status \"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698\": rpc error: code = NotFound desc = could not find container \"4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698\": container with ID starting with 4f702588f344d3a9512f1812c9f2f7770b75bbac0d34bb0d4714b72d3b2a2698 not found: ID does not exist" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.920849 5081 scope.go:117] "RemoveContainer" containerID="9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f" Oct 03 17:14:31 crc kubenswrapper[5081]: E1003 17:14:31.921051 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f\": container with ID starting with 9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f not found: ID does not exist" containerID="9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f" Oct 03 17:14:31 crc kubenswrapper[5081]: I1003 17:14:31.921075 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f"} err="failed to get container status \"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f\": rpc error: code = NotFound desc = could not find container \"9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f\": container with ID starting with 9167c504d651027260cf569af39b627bd25b4953e6f9d81a501960569864cd2f not found: ID does not exist" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.007544 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.007693 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.007745 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mckrw\" (UniqueName: \"kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.007796 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.008000 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.008021 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.008062 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.008728 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.008996 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.013697 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.013758 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.015996 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.016824 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.029319 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mckrw\" (UniqueName: \"kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw\") pod \"ceilometer-0\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") " pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.174340 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.660227 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:14:32 crc kubenswrapper[5081]: W1003 17:14:32.664481 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3462020_4c50_47bb_880f_de9fcbc47c4c.slice/crio-e16f2a4486af8cbe47c531566d42980080bcd611c2220a6a1d35659052473dd4 WatchSource:0}: Error finding container e16f2a4486af8cbe47c531566d42980080bcd611c2220a6a1d35659052473dd4: Status 404 returned error can't find the container with id e16f2a4486af8cbe47c531566d42980080bcd611c2220a6a1d35659052473dd4 Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.744599 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerStarted","Data":"e16f2a4486af8cbe47c531566d42980080bcd611c2220a6a1d35659052473dd4"} Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.750342 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"225c5210-2413-4098-8d82-9907db3aad43","Type":"ContainerStarted","Data":"7fe2d6a768590c5ef8971f07372991c9da91b376bf87b139cd0308b840d36ad6"} Oct 03 17:14:32 crc kubenswrapper[5081]: I1003 17:14:32.778570 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.899630511 podStartE2EDuration="7.778535204s" podCreationTimestamp="2025-10-03 17:14:25 +0000 UTC" firstStartedPulling="2025-10-03 17:14:26.735117822 +0000 UTC m=+6385.700674425" lastFinishedPulling="2025-10-03 17:14:31.614022505 +0000 UTC m=+6390.579579118" observedRunningTime="2025-10-03 17:14:32.768035203 +0000 UTC m=+6391.733591816" watchObservedRunningTime="2025-10-03 17:14:32.778535204 +0000 UTC m=+6391.744091817" Oct 03 17:14:33 crc kubenswrapper[5081]: I1003 17:14:33.029104 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-gvzpw"] Oct 03 17:14:33 crc kubenswrapper[5081]: I1003 17:14:33.038866 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-gvzpw"] Oct 03 17:14:33 crc kubenswrapper[5081]: I1003 17:14:33.760857 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerStarted","Data":"fb2e73bf31bb4c94c33e13304fed5fa404009f11913a0ba5d0b677c7a5fc6480"} Oct 03 17:14:33 crc kubenswrapper[5081]: I1003 17:14:33.846079 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="196ad8b4-8438-4dd2-8062-cd48ce2c8fca" path="/var/lib/kubelet/pods/196ad8b4-8438-4dd2-8062-cd48ce2c8fca/volumes" Oct 03 17:14:34 crc kubenswrapper[5081]: I1003 17:14:34.781868 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerStarted","Data":"c119038b76ae4db623d13e3a79af678922ad264506f6f32b20ebbb64302ed8c1"} Oct 03 17:14:35 crc kubenswrapper[5081]: I1003 17:14:35.792787 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerStarted","Data":"72c2504e339374ee0480e77b3731911ef989887cd77ed65631095a734dc4ba55"} Oct 03 17:14:36 crc kubenswrapper[5081]: I1003 17:14:36.811322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerStarted","Data":"c79b4e2ecaa7e3121432a883894d5ec8e6ddd55cf3407414fc8ae91e5b5fcb81"} Oct 03 17:14:36 crc kubenswrapper[5081]: I1003 17:14:36.812912 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 17:14:36 crc kubenswrapper[5081]: I1003 17:14:36.839639 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.044847231 podStartE2EDuration="5.838231606s" podCreationTimestamp="2025-10-03 17:14:31 +0000 UTC" firstStartedPulling="2025-10-03 17:14:32.667285118 +0000 UTC m=+6391.632841731" lastFinishedPulling="2025-10-03 17:14:36.460669493 +0000 UTC m=+6395.426226106" observedRunningTime="2025-10-03 17:14:36.827697194 +0000 UTC m=+6395.793253827" watchObservedRunningTime="2025-10-03 17:14:36.838231606 +0000 UTC m=+6395.803788239" Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.748013 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-5wkcf"] Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.749977 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.760836 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-5wkcf"] Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.847947 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksdw9\" (UniqueName: \"kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9\") pod \"manila-db-create-5wkcf\" (UID: \"2f2385d1-a278-41cc-ba7c-f61641330a6d\") " pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.949896 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksdw9\" (UniqueName: \"kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9\") pod \"manila-db-create-5wkcf\" (UID: \"2f2385d1-a278-41cc-ba7c-f61641330a6d\") " pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:38 crc kubenswrapper[5081]: I1003 17:14:38.977730 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksdw9\" (UniqueName: \"kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9\") pod \"manila-db-create-5wkcf\" (UID: \"2f2385d1-a278-41cc-ba7c-f61641330a6d\") " pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:39 crc kubenswrapper[5081]: I1003 17:14:39.076248 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:39 crc kubenswrapper[5081]: I1003 17:14:39.625750 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-5wkcf"] Oct 03 17:14:39 crc kubenswrapper[5081]: I1003 17:14:39.842496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5wkcf" event={"ID":"2f2385d1-a278-41cc-ba7c-f61641330a6d","Type":"ContainerStarted","Data":"f17232ee9f9a4cb28e6b1150f3edfeea2be86cba633912db7460115a86b4be76"} Oct 03 17:14:39 crc kubenswrapper[5081]: I1003 17:14:39.842547 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5wkcf" event={"ID":"2f2385d1-a278-41cc-ba7c-f61641330a6d","Type":"ContainerStarted","Data":"684e851b2319d2c0c2e793315ebd18b58b6645a8080d986fc297fd05f8b26617"} Oct 03 17:14:39 crc kubenswrapper[5081]: I1003 17:14:39.862462 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-5wkcf" podStartSLOduration=1.862443953 podStartE2EDuration="1.862443953s" podCreationTimestamp="2025-10-03 17:14:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:14:39.856857683 +0000 UTC m=+6398.822414326" watchObservedRunningTime="2025-10-03 17:14:39.862443953 +0000 UTC m=+6398.828000566" Oct 03 17:14:40 crc kubenswrapper[5081]: I1003 17:14:40.852251 5081 generic.go:334] "Generic (PLEG): container finished" podID="2f2385d1-a278-41cc-ba7c-f61641330a6d" containerID="f17232ee9f9a4cb28e6b1150f3edfeea2be86cba633912db7460115a86b4be76" exitCode=0 Oct 03 17:14:40 crc kubenswrapper[5081]: I1003 17:14:40.852664 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5wkcf" event={"ID":"2f2385d1-a278-41cc-ba7c-f61641330a6d","Type":"ContainerDied","Data":"f17232ee9f9a4cb28e6b1150f3edfeea2be86cba633912db7460115a86b4be76"} Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.268078 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.421915 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksdw9\" (UniqueName: \"kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9\") pod \"2f2385d1-a278-41cc-ba7c-f61641330a6d\" (UID: \"2f2385d1-a278-41cc-ba7c-f61641330a6d\") " Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.429145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9" (OuterVolumeSpecName: "kube-api-access-ksdw9") pod "2f2385d1-a278-41cc-ba7c-f61641330a6d" (UID: "2f2385d1-a278-41cc-ba7c-f61641330a6d"). InnerVolumeSpecName "kube-api-access-ksdw9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.528777 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksdw9\" (UniqueName: \"kubernetes.io/projected/2f2385d1-a278-41cc-ba7c-f61641330a6d-kube-api-access-ksdw9\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.871591 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5wkcf" event={"ID":"2f2385d1-a278-41cc-ba7c-f61641330a6d","Type":"ContainerDied","Data":"684e851b2319d2c0c2e793315ebd18b58b6645a8080d986fc297fd05f8b26617"} Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.871637 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="684e851b2319d2c0c2e793315ebd18b58b6645a8080d986fc297fd05f8b26617" Oct 03 17:14:42 crc kubenswrapper[5081]: I1003 17:14:42.871692 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-5wkcf" Oct 03 17:14:45 crc kubenswrapper[5081]: I1003 17:14:45.828242 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:14:45 crc kubenswrapper[5081]: E1003 17:14:45.828971 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.855561 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-3530-account-create-brtxv"] Oct 03 17:14:48 crc kubenswrapper[5081]: E1003 17:14:48.856631 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f2385d1-a278-41cc-ba7c-f61641330a6d" containerName="mariadb-database-create" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.856645 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f2385d1-a278-41cc-ba7c-f61641330a6d" containerName="mariadb-database-create" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.856880 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f2385d1-a278-41cc-ba7c-f61641330a6d" containerName="mariadb-database-create" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.857678 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.860461 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Oct 03 17:14:48 crc kubenswrapper[5081]: I1003 17:14:48.872207 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-3530-account-create-brtxv"] Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.055927 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tshj9\" (UniqueName: \"kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9\") pod \"manila-3530-account-create-brtxv\" (UID: \"4dc291b0-2241-49ae-b987-c456e046aa57\") " pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.158057 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tshj9\" (UniqueName: \"kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9\") pod \"manila-3530-account-create-brtxv\" (UID: \"4dc291b0-2241-49ae-b987-c456e046aa57\") " pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.176798 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tshj9\" (UniqueName: \"kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9\") pod \"manila-3530-account-create-brtxv\" (UID: \"4dc291b0-2241-49ae-b987-c456e046aa57\") " pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.179982 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.610806 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-3530-account-create-brtxv"] Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.934385 5081 generic.go:334] "Generic (PLEG): container finished" podID="4dc291b0-2241-49ae-b987-c456e046aa57" containerID="b39aef380e29183744b55cfde8038231a362c26e3a487eb7c456d16d14e11558" exitCode=0 Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.934439 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3530-account-create-brtxv" event={"ID":"4dc291b0-2241-49ae-b987-c456e046aa57","Type":"ContainerDied","Data":"b39aef380e29183744b55cfde8038231a362c26e3a487eb7c456d16d14e11558"} Oct 03 17:14:49 crc kubenswrapper[5081]: I1003 17:14:49.934745 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3530-account-create-brtxv" event={"ID":"4dc291b0-2241-49ae-b987-c456e046aa57","Type":"ContainerStarted","Data":"a7ff1795409191eb66885bacfd73c8fee3a3ee8646d6cbf12e82abdb4bb5e2ee"} Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.361238 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.520217 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tshj9\" (UniqueName: \"kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9\") pod \"4dc291b0-2241-49ae-b987-c456e046aa57\" (UID: \"4dc291b0-2241-49ae-b987-c456e046aa57\") " Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.527667 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9" (OuterVolumeSpecName: "kube-api-access-tshj9") pod "4dc291b0-2241-49ae-b987-c456e046aa57" (UID: "4dc291b0-2241-49ae-b987-c456e046aa57"). InnerVolumeSpecName "kube-api-access-tshj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.622348 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tshj9\" (UniqueName: \"kubernetes.io/projected/4dc291b0-2241-49ae-b987-c456e046aa57-kube-api-access-tshj9\") on node \"crc\" DevicePath \"\"" Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.952794 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3530-account-create-brtxv" event={"ID":"4dc291b0-2241-49ae-b987-c456e046aa57","Type":"ContainerDied","Data":"a7ff1795409191eb66885bacfd73c8fee3a3ee8646d6cbf12e82abdb4bb5e2ee"} Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.952840 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7ff1795409191eb66885bacfd73c8fee3a3ee8646d6cbf12e82abdb4bb5e2ee" Oct 03 17:14:51 crc kubenswrapper[5081]: I1003 17:14:51.952874 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-3530-account-create-brtxv" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.109803 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-5qg9r"] Oct 03 17:14:54 crc kubenswrapper[5081]: E1003 17:14:54.111918 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc291b0-2241-49ae-b987-c456e046aa57" containerName="mariadb-account-create" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.111939 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc291b0-2241-49ae-b987-c456e046aa57" containerName="mariadb-account-create" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.112239 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc291b0-2241-49ae-b987-c456e046aa57" containerName="mariadb-account-create" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.113250 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.116533 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-jtv8r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.116807 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.121381 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-5qg9r"] Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.277378 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.277463 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9tr6\" (UniqueName: \"kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.277524 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.277650 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.379481 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.379560 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.379737 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.379821 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9tr6\" (UniqueName: \"kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6\") pod \"manila-db-sync-5qg9r\" (UID: 
\"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.386120 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.386453 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.392056 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.399362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9tr6\" (UniqueName: \"kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6\") pod \"manila-db-sync-5qg9r\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:54 crc kubenswrapper[5081]: I1003 17:14:54.445141 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-5qg9r" Oct 03 17:14:55 crc kubenswrapper[5081]: I1003 17:14:55.039728 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-5qg9r"] Oct 03 17:14:55 crc kubenswrapper[5081]: W1003 17:14:55.054126 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod613389b9_58e0_4c4e_b065_2b379fff72ed.slice/crio-b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b WatchSource:0}: Error finding container b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b: Status 404 returned error can't find the container with id b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b Oct 03 17:14:55 crc kubenswrapper[5081]: I1003 17:14:55.988692 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-5qg9r" event={"ID":"613389b9-58e0-4c4e-b065-2b379fff72ed","Type":"ContainerStarted","Data":"b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b"} Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.041441 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-5qg9r" event={"ID":"613389b9-58e0-4c4e-b065-2b379fff72ed","Type":"ContainerStarted","Data":"a4a1f8225d015010c8c489aea5fb798e939911e5ac2ccf7ee492545c86fe5451"} Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.063951 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-5qg9r" podStartSLOduration=1.792306717 podStartE2EDuration="6.063931688s" podCreationTimestamp="2025-10-03 17:14:54 +0000 UTC" firstStartedPulling="2025-10-03 17:14:55.05738808 +0000 UTC m=+6414.022944693" lastFinishedPulling="2025-10-03 17:14:59.329013051 +0000 UTC m=+6418.294569664" observedRunningTime="2025-10-03 17:15:00.055822776 +0000 UTC 
m=+6419.021379399" watchObservedRunningTime="2025-10-03 17:15:00.063931688 +0000 UTC m=+6419.029488301" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.142545 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r"] Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.144327 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.146713 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.147397 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.162435 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r"] Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.314286 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.314411 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.314441 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp4jg\" (UniqueName: \"kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.417131 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.417509 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp4jg\" (UniqueName: \"kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.417643 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume\") pod 
\"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.418609 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.435527 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp4jg\" (UniqueName: \"kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.447889 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume\") pod \"collect-profiles-29325195-6lx9r\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.464997 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.827891 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:15:00 crc kubenswrapper[5081]: E1003 17:15:00.828425 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:15:00 crc kubenswrapper[5081]: I1003 17:15:00.896686 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r"] Oct 03 17:15:01 crc kubenswrapper[5081]: I1003 17:15:01.080202 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" event={"ID":"8d25a54a-5d99-4948-b99e-374c1fa40681","Type":"ContainerStarted","Data":"5d73c8b29ec534fff72a080baba610803c048bd681e85fab902ca4338c7d3067"} Oct 03 17:15:02 crc kubenswrapper[5081]: I1003 17:15:02.092324 5081 generic.go:334] "Generic (PLEG): container finished" podID="613389b9-58e0-4c4e-b065-2b379fff72ed" containerID="a4a1f8225d015010c8c489aea5fb798e939911e5ac2ccf7ee492545c86fe5451" exitCode=0 Oct 03 17:15:02 crc kubenswrapper[5081]: I1003 17:15:02.092416 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-5qg9r" event={"ID":"613389b9-58e0-4c4e-b065-2b379fff72ed","Type":"ContainerDied","Data":"a4a1f8225d015010c8c489aea5fb798e939911e5ac2ccf7ee492545c86fe5451"} Oct 03 17:15:02 crc kubenswrapper[5081]: I1003 17:15:02.096925 5081 generic.go:334] "Generic (PLEG): container finished" 
podID="8d25a54a-5d99-4948-b99e-374c1fa40681" containerID="23923f40825074c4a6207c51bf39d02be0d8443aec4d7996035e69d513fb152c" exitCode=0 Oct 03 17:15:02 crc kubenswrapper[5081]: I1003 17:15:02.096977 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" event={"ID":"8d25a54a-5d99-4948-b99e-374c1fa40681","Type":"ContainerDied","Data":"23923f40825074c4a6207c51bf39d02be0d8443aec4d7996035e69d513fb152c"} Oct 03 17:15:02 crc kubenswrapper[5081]: I1003 17:15:02.181547 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.603075 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.703756 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume\") pod \"8d25a54a-5d99-4948-b99e-374c1fa40681\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.704036 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume\") pod \"8d25a54a-5d99-4948-b99e-374c1fa40681\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.704086 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp4jg\" (UniqueName: \"kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg\") pod \"8d25a54a-5d99-4948-b99e-374c1fa40681\" (UID: \"8d25a54a-5d99-4948-b99e-374c1fa40681\") " Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.706542 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume" (OuterVolumeSpecName: "config-volume") pod "8d25a54a-5d99-4948-b99e-374c1fa40681" (UID: "8d25a54a-5d99-4948-b99e-374c1fa40681"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.712634 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg" (OuterVolumeSpecName: "kube-api-access-gp4jg") pod "8d25a54a-5d99-4948-b99e-374c1fa40681" (UID: "8d25a54a-5d99-4948-b99e-374c1fa40681"). InnerVolumeSpecName "kube-api-access-gp4jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.712675 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8d25a54a-5d99-4948-b99e-374c1fa40681" (UID: "8d25a54a-5d99-4948-b99e-374c1fa40681"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.808470 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8d25a54a-5d99-4948-b99e-374c1fa40681-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.808547 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d25a54a-5d99-4948-b99e-374c1fa40681-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.808716 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp4jg\" (UniqueName: \"kubernetes.io/projected/8d25a54a-5d99-4948-b99e-374c1fa40681-kube-api-access-gp4jg\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:03 crc kubenswrapper[5081]: I1003 17:15:03.840042 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-5qg9r" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.012663 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data\") pod \"613389b9-58e0-4c4e-b065-2b379fff72ed\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.012754 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data\") pod \"613389b9-58e0-4c4e-b065-2b379fff72ed\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.012797 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle\") pod \"613389b9-58e0-4c4e-b065-2b379fff72ed\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.013072 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9tr6\" (UniqueName: \"kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6\") pod \"613389b9-58e0-4c4e-b065-2b379fff72ed\" (UID: \"613389b9-58e0-4c4e-b065-2b379fff72ed\") " Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.020025 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6" (OuterVolumeSpecName: "kube-api-access-t9tr6") pod "613389b9-58e0-4c4e-b065-2b379fff72ed" (UID: "613389b9-58e0-4c4e-b065-2b379fff72ed"). InnerVolumeSpecName "kube-api-access-t9tr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.020486 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "613389b9-58e0-4c4e-b065-2b379fff72ed" (UID: "613389b9-58e0-4c4e-b065-2b379fff72ed"). InnerVolumeSpecName "job-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.031466 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data" (OuterVolumeSpecName: "config-data") pod "613389b9-58e0-4c4e-b065-2b379fff72ed" (UID: "613389b9-58e0-4c4e-b065-2b379fff72ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.054844 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "613389b9-58e0-4c4e-b065-2b379fff72ed" (UID: "613389b9-58e0-4c4e-b065-2b379fff72ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.118348 5081 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.118398 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.118413 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/613389b9-58e0-4c4e-b065-2b379fff72ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.118426 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9tr6\" (UniqueName: \"kubernetes.io/projected/613389b9-58e0-4c4e-b065-2b379fff72ed-kube-api-access-t9tr6\") on node \"crc\" DevicePath \"\"" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.126255 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-5qg9r" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.128004 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-5qg9r" event={"ID":"613389b9-58e0-4c4e-b065-2b379fff72ed","Type":"ContainerDied","Data":"b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b"} Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.128086 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2fb9940dd408188c7a781d96b44ed5924bcd1bb041e796293537899795ef01b" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.131211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" event={"ID":"8d25a54a-5d99-4948-b99e-374c1fa40681","Type":"ContainerDied","Data":"5d73c8b29ec534fff72a080baba610803c048bd681e85fab902ca4338c7d3067"} Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.131298 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d73c8b29ec534fff72a080baba610803c048bd681e85fab902ca4338c7d3067" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.131305 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.490505 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 03 17:15:04 crc kubenswrapper[5081]: E1003 17:15:04.491145 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d25a54a-5d99-4948-b99e-374c1fa40681" containerName="collect-profiles" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.491195 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d25a54a-5d99-4948-b99e-374c1fa40681" containerName="collect-profiles" Oct 03 17:15:04 crc kubenswrapper[5081]: E1003 17:15:04.491228 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="613389b9-58e0-4c4e-b065-2b379fff72ed" containerName="manila-db-sync" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.491238 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="613389b9-58e0-4c4e-b065-2b379fff72ed" containerName="manila-db-sync" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.491540 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="613389b9-58e0-4c4e-b065-2b379fff72ed" containerName="manila-db-sync" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.491577 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d25a54a-5d99-4948-b99e-374c1fa40681" containerName="collect-profiles" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.492847 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.497540 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.497730 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.498076 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-jtv8r" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.502877 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.512341 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.622033 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.632546 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.634066 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.644887 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-scripts\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.645011 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.645217 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6wbx\" (UniqueName: \"kubernetes.io/projected/8bf9610d-b7df-4307-91cc-71c7dcc42da9-kube-api-access-l6wbx\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.645595 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.645637 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bf9610d-b7df-4307-91cc-71c7dcc42da9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.666295 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.674937 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.731396 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"] Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.734478 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752092 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752166 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752233 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752368 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-ceph\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752399 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bf9610d-b7df-4307-91cc-71c7dcc42da9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752451 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752476 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-scripts\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752577 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752784 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-scripts\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752820 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6wbx\" (UniqueName: \"kubernetes.io/projected/8bf9610d-b7df-4307-91cc-71c7dcc42da9-kube-api-access-l6wbx\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.752844 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.753030 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.753090 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85wxb\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-kube-api-access-85wxb\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.755254 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8bf9610d-b7df-4307-91cc-71c7dcc42da9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.769639 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.773606 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.785146 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.792172 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"]
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.794274 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bf9610d-b7df-4307-91cc-71c7dcc42da9-scripts\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.806007 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6wbx\" (UniqueName: \"kubernetes.io/projected/8bf9610d-b7df-4307-91cc-71c7dcc42da9-kube-api-access-l6wbx\") pod \"manila-scheduler-0\" (UID: \"8bf9610d-b7df-4307-91cc-71c7dcc42da9\") " pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.848270 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"]
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.857705 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.857886 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.857992 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85wxb\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-kube-api-access-85wxb\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858091 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858128 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858170 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858252 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-ceph\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858314 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858445 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9dp4\" (UniqueName: \"kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858506 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858544 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858626 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-scripts\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.858656 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.867579 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.868475 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.875740 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-ceph\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.876389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.876795 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.882523 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-scripts\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.886427 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.887619 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325150-sl8zc"]
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.899453 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.912094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85wxb\" (UniqueName: \"kubernetes.io/projected/e89d46ed-7517-4484-b4f7-9e067e6cf6d1-kube-api-access-85wxb\") pod \"manila-share-share1-0\" (UID: \"e89d46ed-7517-4484-b4f7-9e067e6cf6d1\") " pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.961420 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.962015 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9dp4\" (UniqueName: \"kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.962108 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.962186 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.962826 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.964627 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.964658 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.965361 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.965775 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.971947 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.972496 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"]
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.975257 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.981867 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data"
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.983906 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Oct 03 17:15:04 crc kubenswrapper[5081]: I1003 17:15:04.987392 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9dp4\" (UniqueName: \"kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4\") pod \"dnsmasq-dns-5d5b7d99b5-snzbt\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.018237 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072111 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0ea72c99-363e-402d-a70d-ab74578c11b3-etc-machine-id\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072170 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrttk\" (UniqueName: \"kubernetes.io/projected/0ea72c99-363e-402d-a70d-ab74578c11b3-kube-api-access-hrttk\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072220 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072257 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data-custom\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072402 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ea72c99-363e-402d-a70d-ab74578c11b3-logs\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072539 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.072737 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-scripts\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.174411 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ea72c99-363e-402d-a70d-ab74578c11b3-logs\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.174914 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175019 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-scripts\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175080 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0ea72c99-363e-402d-a70d-ab74578c11b3-etc-machine-id\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175107 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrttk\" (UniqueName: \"kubernetes.io/projected/0ea72c99-363e-402d-a70d-ab74578c11b3-kube-api-access-hrttk\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175144 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175175 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data-custom\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.175205 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ea72c99-363e-402d-a70d-ab74578c11b3-logs\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.181261 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0ea72c99-363e-402d-a70d-ab74578c11b3-etc-machine-id\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.185552 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.185824 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.186555 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-config-data-custom\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.194023 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ea72c99-363e-402d-a70d-ab74578c11b3-scripts\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.199185 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrttk\" (UniqueName: \"kubernetes.io/projected/0ea72c99-363e-402d-a70d-ab74578c11b3-kube-api-access-hrttk\") pod \"manila-api-0\" (UID: \"0ea72c99-363e-402d-a70d-ab74578c11b3\") " pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.352021 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.608337 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"]
Oct 03 17:15:05 crc kubenswrapper[5081]: I1003 17:15:05.853448 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4dd4268-3d1d-42ee-abb7-e03624082842" path="/var/lib/kubelet/pods/d4dd4268-3d1d-42ee-abb7-e03624082842/volumes"
Oct 03 17:15:06 crc kubenswrapper[5081]: I1003 17:15:06.131578 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"]
Oct 03 17:15:06 crc kubenswrapper[5081]: I1003 17:15:06.163003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8bf9610d-b7df-4307-91cc-71c7dcc42da9","Type":"ContainerStarted","Data":"90cf3916b4458255f23c1b09afe558f309dc0dda9151e4095261641d42bc5cea"}
Oct 03 17:15:06 crc kubenswrapper[5081]: I1003 17:15:06.165274 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" event={"ID":"e47fa986-ad2a-49fe-b568-84f14dc016bf","Type":"ContainerStarted","Data":"054b9c15663a3e5aa2dc51d3e96deb3f861d7d32445ae39fd0437831a6a491c9"}
Oct 03 17:15:06 crc kubenswrapper[5081]: I1003 17:15:06.225950 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"]
Oct 03 17:15:06 crc kubenswrapper[5081]: W1003 17:15:06.244751 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode89d46ed_7517_4484_b4f7_9e067e6cf6d1.slice/crio-48dd46cde4c0cbe003cbeff8c2296eae09d4d48280b63bfa5ba7348dd8bc5222 WatchSource:0}: Error finding container 48dd46cde4c0cbe003cbeff8c2296eae09d4d48280b63bfa5ba7348dd8bc5222: Status 404 returned error can't find the container with id 48dd46cde4c0cbe003cbeff8c2296eae09d4d48280b63bfa5ba7348dd8bc5222
Oct 03 17:15:06 crc kubenswrapper[5081]: I1003 17:15:06.508188 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Oct 03 17:15:06 crc kubenswrapper[5081]: W1003 17:15:06.604758 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ea72c99_363e_402d_a70d_ab74578c11b3.slice/crio-290b2a00c017f2648a5f588ee986653f53b0b002a8e3fe173ee390cc30f8e5f6 WatchSource:0}: Error finding container 290b2a00c017f2648a5f588ee986653f53b0b002a8e3fe173ee390cc30f8e5f6: Status 404 returned error can't find the container with id 290b2a00c017f2648a5f588ee986653f53b0b002a8e3fe173ee390cc30f8e5f6
Oct 03 17:15:07 crc kubenswrapper[5081]: I1003 17:15:07.199580 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"0ea72c99-363e-402d-a70d-ab74578c11b3","Type":"ContainerStarted","Data":"9ad0fe3ca1b5ad5f0a6f620f49d0759836db30d617f754a2c0fb3d8709794433"}
Oct 03 17:15:07 crc kubenswrapper[5081]: I1003 17:15:07.200462 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"0ea72c99-363e-402d-a70d-ab74578c11b3","Type":"ContainerStarted","Data":"290b2a00c017f2648a5f588ee986653f53b0b002a8e3fe173ee390cc30f8e5f6"}
Oct 03 17:15:07 crc kubenswrapper[5081]: I1003 17:15:07.205980 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e89d46ed-7517-4484-b4f7-9e067e6cf6d1","Type":"ContainerStarted","Data":"48dd46cde4c0cbe003cbeff8c2296eae09d4d48280b63bfa5ba7348dd8bc5222"}
Oct 03 17:15:07 crc kubenswrapper[5081]: I1003 17:15:07.210329 5081 generic.go:334] "Generic (PLEG): container finished" podID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerID="2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65" exitCode=0
Oct 03 17:15:07 crc kubenswrapper[5081]: I1003 17:15:07.210369 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" event={"ID":"e47fa986-ad2a-49fe-b568-84f14dc016bf","Type":"ContainerDied","Data":"2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65"}
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.227983 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8bf9610d-b7df-4307-91cc-71c7dcc42da9","Type":"ContainerStarted","Data":"aa71758ecd4bf8e6c8550a7fdec5e1844c8e7a5e596258031f28c645bd447137"}
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.228341 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"8bf9610d-b7df-4307-91cc-71c7dcc42da9","Type":"ContainerStarted","Data":"c8e2d6e33f9736f189cad8ddfd90b5430dc7f8a6d043e0826001f8e0ec5be396"}
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.239756 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" event={"ID":"e47fa986-ad2a-49fe-b568-84f14dc016bf","Type":"ContainerStarted","Data":"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab"}
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.240048 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.244208 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"0ea72c99-363e-402d-a70d-ab74578c11b3","Type":"ContainerStarted","Data":"f18d71968af99e2c4564fb215db2538f414dd2b95c02fcd6d288ac2406c55d64"}
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.244575 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0"
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.246634 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.340723052 podStartE2EDuration="4.246619045s" podCreationTimestamp="2025-10-03 17:15:04 +0000 UTC" firstStartedPulling="2025-10-03 17:15:05.711237936 +0000 UTC m=+6424.676794549" lastFinishedPulling="2025-10-03 17:15:06.617133929 +0000 UTC m=+6425.582690542" observedRunningTime="2025-10-03 17:15:08.245969226 +0000 UTC m=+6427.211525849" watchObservedRunningTime="2025-10-03 17:15:08.246619045 +0000 UTC m=+6427.212175658"
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.268755 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" podStartSLOduration=4.2687308680000005 podStartE2EDuration="4.268730868s" podCreationTimestamp="2025-10-03 17:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:15:08.265855036 +0000 UTC m=+6427.231411659" watchObservedRunningTime="2025-10-03 17:15:08.268730868 +0000 UTC m=+6427.234287481"
Oct 03 17:15:08 crc kubenswrapper[5081]: I1003 17:15:08.289927 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.289910515 podStartE2EDuration="4.289910515s" podCreationTimestamp="2025-10-03 17:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:15:08.286292311 +0000 UTC m=+6427.251848934" watchObservedRunningTime="2025-10-03 17:15:08.289910515 +0000 UTC m=+6427.255467128"
Oct 03 17:15:12 crc kubenswrapper[5081]: I1003 17:15:12.828136 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"
Oct 03 17:15:12 crc kubenswrapper[5081]: E1003 17:15:12.829023 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:15:13 crc kubenswrapper[5081]: I1003 17:15:13.301227 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e89d46ed-7517-4484-b4f7-9e067e6cf6d1","Type":"ContainerStarted","Data":"4bda4ec01fae6f5759d249887b1f21d6f491a97b62b240494e2b47db59779d49"}
Oct 03 17:15:13 crc kubenswrapper[5081]: I1003 17:15:13.301280 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"e89d46ed-7517-4484-b4f7-9e067e6cf6d1","Type":"ContainerStarted","Data":"b4cae7bf103b7940a8c93e12d9ad70ec94fe5e8c54881b9652a235e59574ae09"}
Oct 03 17:15:13 crc kubenswrapper[5081]: I1003 17:15:13.326149 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.65178722 podStartE2EDuration="9.326130752s" podCreationTimestamp="2025-10-03 17:15:04 +0000 UTC" firstStartedPulling="2025-10-03 17:15:06.293090439 +0000 UTC m=+6425.258647052" lastFinishedPulling="2025-10-03 17:15:11.967433971 +0000 UTC m=+6430.932990584" observedRunningTime="2025-10-03 17:15:13.320083619 +0000 UTC m=+6432.285640232" watchObservedRunningTime="2025-10-03 17:15:13.326130752 +0000 UTC m=+6432.291687355"
Oct 03 17:15:14 crc kubenswrapper[5081]: I1003 17:15:14.900326 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0"
Oct 03 17:15:14 crc kubenswrapper[5081]: I1003 17:15:14.973209 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0"
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.019747 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt"
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.094144 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"]
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.094542 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-565544976c-mklxz" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="dnsmasq-dns" containerID="cri-o://ebe8ae23fef3f4cfec9d42b20a833ced18e8c2ff9b316e59819248277886b5d2" gracePeriod=10
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.327165 5081 generic.go:334] "Generic (PLEG): container finished" podID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerID="ebe8ae23fef3f4cfec9d42b20a833ced18e8c2ff9b316e59819248277886b5d2" exitCode=0
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.327316 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565544976c-mklxz" event={"ID":"fd9e84b4-3aa8-4013-b48d-90c1c0e04215","Type":"ContainerDied","Data":"ebe8ae23fef3f4cfec9d42b20a833ced18e8c2ff9b316e59819248277886b5d2"}
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.731730 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565544976c-mklxz"
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.892436 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb\") pod \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") "
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.892856 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8hjw\" (UniqueName: \"kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw\") pod \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") "
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.892952 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config\") pod \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") "
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.893040 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc\") pod \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") "
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.893244 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb\") pod \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\" (UID: \"fd9e84b4-3aa8-4013-b48d-90c1c0e04215\") "
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.906525 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw" (OuterVolumeSpecName: "kube-api-access-h8hjw") pod "fd9e84b4-3aa8-4013-b48d-90c1c0e04215" (UID: "fd9e84b4-3aa8-4013-b48d-90c1c0e04215"). InnerVolumeSpecName "kube-api-access-h8hjw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.952542 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fd9e84b4-3aa8-4013-b48d-90c1c0e04215" (UID: "fd9e84b4-3aa8-4013-b48d-90c1c0e04215"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.969669 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config" (OuterVolumeSpecName: "config") pod "fd9e84b4-3aa8-4013-b48d-90c1c0e04215" (UID: "fd9e84b4-3aa8-4013-b48d-90c1c0e04215"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.977580 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fd9e84b4-3aa8-4013-b48d-90c1c0e04215" (UID: "fd9e84b4-3aa8-4013-b48d-90c1c0e04215"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:15:15 crc kubenswrapper[5081]: I1003 17:15:15.978836 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fd9e84b4-3aa8-4013-b48d-90c1c0e04215" (UID: "fd9e84b4-3aa8-4013-b48d-90c1c0e04215"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.009336 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.009375 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.009386 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8hjw\" (UniqueName: \"kubernetes.io/projected/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-kube-api-access-h8hjw\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.009396 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-config\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.009405 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd9e84b4-3aa8-4013-b48d-90c1c0e04215-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.357144 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-565544976c-mklxz" event={"ID":"fd9e84b4-3aa8-4013-b48d-90c1c0e04215","Type":"ContainerDied","Data":"169c3c5944a67f6e7389ac56ae14c0d83b09905a7bc9f48aad853414d3667f77"}
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.357224 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-565544976c-mklxz"
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.357328 5081 scope.go:117] "RemoveContainer" containerID="ebe8ae23fef3f4cfec9d42b20a833ced18e8c2ff9b316e59819248277886b5d2"
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.402957 5081 scope.go:117] "RemoveContainer" containerID="8ccc246d5b64859981f87ddc8483cff5b5f42f51c32f6c3c37be32afc94fab73"
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.411136 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"]
Oct 03 17:15:16 crc kubenswrapper[5081]: I1003 17:15:16.433387 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-565544976c-mklxz"]
Oct 03 17:15:17 crc kubenswrapper[5081]: I1003 17:15:17.844668 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" path="/var/lib/kubelet/pods/fd9e84b4-3aa8-4013-b48d-90c1c0e04215/volumes"
Oct 03 17:15:18 crc kubenswrapper[5081]: I1003 17:15:18.377777 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 17:15:18 crc kubenswrapper[5081]: I1003 17:15:18.378672 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-central-agent" containerID="cri-o://fb2e73bf31bb4c94c33e13304fed5fa404009f11913a0ba5d0b677c7a5fc6480" gracePeriod=30
Oct 03 17:15:18 crc kubenswrapper[5081]: I1003 17:15:18.379131 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="proxy-httpd" containerID="cri-o://c79b4e2ecaa7e3121432a883894d5ec8e6ddd55cf3407414fc8ae91e5b5fcb81" gracePeriod=30
Oct 03 17:15:18 crc kubenswrapper[5081]: I1003 17:15:18.379215 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="sg-core" containerID="cri-o://72c2504e339374ee0480e77b3731911ef989887cd77ed65631095a734dc4ba55" gracePeriod=30
Oct 03 17:15:18 crc kubenswrapper[5081]: I1003 17:15:18.379269 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-notification-agent" containerID="cri-o://c119038b76ae4db623d13e3a79af678922ad264506f6f32b20ebbb64302ed8c1" gracePeriod=30
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394721 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerID="c79b4e2ecaa7e3121432a883894d5ec8e6ddd55cf3407414fc8ae91e5b5fcb81" exitCode=0
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394766 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerID="72c2504e339374ee0480e77b3731911ef989887cd77ed65631095a734dc4ba55" exitCode=2
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394775 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerID="fb2e73bf31bb4c94c33e13304fed5fa404009f11913a0ba5d0b677c7a5fc6480" exitCode=0
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394769 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerDied","Data":"c79b4e2ecaa7e3121432a883894d5ec8e6ddd55cf3407414fc8ae91e5b5fcb81"}
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394817 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerDied","Data":"72c2504e339374ee0480e77b3731911ef989887cd77ed65631095a734dc4ba55"}
Oct 03 17:15:19 crc kubenswrapper[5081]: I1003 17:15:19.394827 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerDied","Data":"fb2e73bf31bb4c94c33e13304fed5fa404009f11913a0ba5d0b677c7a5fc6480"}
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.433432 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerID="c119038b76ae4db623d13e3a79af678922ad264506f6f32b20ebbb64302ed8c1" exitCode=0
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.433521 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerDied","Data":"c119038b76ae4db623d13e3a79af678922ad264506f6f32b20ebbb64302ed8c1"}
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.739409 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857009 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857096 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857150 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mckrw\" (UniqueName: \"kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857209 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857282 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857357 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.857477 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml\") pod \"b3462020-4c50-47bb-880f-de9fcbc47c4c\" (UID: \"b3462020-4c50-47bb-880f-de9fcbc47c4c\") "
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.859883 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.861857 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.862503 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts" (OuterVolumeSpecName: "scripts") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.862683 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw" (OuterVolumeSpecName: "kube-api-access-mckrw") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "kube-api-access-mckrw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.893099 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.941422 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960046 5081 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960076 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960093 5081 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960102 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mckrw\" (UniqueName: \"kubernetes.io/projected/b3462020-4c50-47bb-880f-de9fcbc47c4c-kube-api-access-mckrw\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960114 5081 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3462020-4c50-47bb-880f-de9fcbc47c4c-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.960123 5081 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:22 crc kubenswrapper[5081]: I1003 17:15:22.978876 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data" (OuterVolumeSpecName: "config-data") pod "b3462020-4c50-47bb-880f-de9fcbc47c4c" (UID: "b3462020-4c50-47bb-880f-de9fcbc47c4c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.062701 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3462020-4c50-47bb-880f-de9fcbc47c4c-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.453198 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3462020-4c50-47bb-880f-de9fcbc47c4c","Type":"ContainerDied","Data":"e16f2a4486af8cbe47c531566d42980080bcd611c2220a6a1d35659052473dd4"}
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.453252 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.453260 5081 scope.go:117] "RemoveContainer" containerID="c79b4e2ecaa7e3121432a883894d5ec8e6ddd55cf3407414fc8ae91e5b5fcb81"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.493433 5081 scope.go:117] "RemoveContainer" containerID="72c2504e339374ee0480e77b3731911ef989887cd77ed65631095a734dc4ba55"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.498437 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.511624 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.524695 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525292 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-notification-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525316 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-notification-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525335 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="dnsmasq-dns"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525346 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="dnsmasq-dns"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525363 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="init"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525371 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="init"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525392 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="sg-core"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525400 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="sg-core"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525414 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="proxy-httpd"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525421 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="proxy-httpd"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.525441 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-central-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525448 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-central-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525767 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="proxy-httpd"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525793 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd9e84b4-3aa8-4013-b48d-90c1c0e04215" containerName="dnsmasq-dns"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525821 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-central-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525835 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="sg-core"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.525850 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" containerName="ceilometer-notification-agent"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.528360 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.532305 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.532653 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.534004 5081 scope.go:117] "RemoveContainer" containerID="c119038b76ae4db623d13e3a79af678922ad264506f6f32b20ebbb64302ed8c1"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.547530 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.562280 5081 scope.go:117] "RemoveContainer" containerID="fb2e73bf31bb4c94c33e13304fed5fa404009f11913a0ba5d0b677c7a5fc6480"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.678666 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-config-data\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.678716 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-log-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.678906 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzht4\" (UniqueName: \"kubernetes.io/projected/1633625d-b110-403f-81cf-378b74105c5d-kube-api-access-pzht4\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.679195 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.679259 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-scripts\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.679682 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-run-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.679789 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.781741 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-run-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.781802 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.781906 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-config-data\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.781930 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-log-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.781959 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzht4\" (UniqueName: \"kubernetes.io/projected/1633625d-b110-403f-81cf-378b74105c5d-kube-api-access-pzht4\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.782040 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.782326 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-run-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.782405 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1633625d-b110-403f-81cf-378b74105c5d-log-httpd\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.782575 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-scripts\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.785979 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.786130 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-scripts\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.787434 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.796688 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1633625d-b110-403f-81cf-378b74105c5d-config-data\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.806417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzht4\" (UniqueName: \"kubernetes.io/projected/1633625d-b110-403f-81cf-378b74105c5d-kube-api-access-pzht4\") pod \"ceilometer-0\" (UID: \"1633625d-b110-403f-81cf-378b74105c5d\") " pod="openstack/ceilometer-0"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.828333 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"
Oct 03 17:15:23 crc kubenswrapper[5081]: E1003 17:15:23.828684 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.841899 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3462020-4c50-47bb-880f-de9fcbc47c4c" path="/var/lib/kubelet/pods/b3462020-4c50-47bb-880f-de9fcbc47c4c/volumes"
Oct 03 17:15:23 crc kubenswrapper[5081]: I1003 17:15:23.852431 5081 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 17:15:24 crc kubenswrapper[5081]: I1003 17:15:24.326426 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 17:15:24 crc kubenswrapper[5081]: W1003 17:15:24.328332 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1633625d_b110_403f_81cf_378b74105c5d.slice/crio-f33a221175a598c9036f91ba0ee2501548199450e3463072b0fc47f77d828208 WatchSource:0}: Error finding container f33a221175a598c9036f91ba0ee2501548199450e3463072b0fc47f77d828208: Status 404 returned error can't find the container with id f33a221175a598c9036f91ba0ee2501548199450e3463072b0fc47f77d828208 Oct 03 17:15:24 crc kubenswrapper[5081]: I1003 17:15:24.331336 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:15:24 crc kubenswrapper[5081]: I1003 17:15:24.463060 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1633625d-b110-403f-81cf-378b74105c5d","Type":"ContainerStarted","Data":"f33a221175a598c9036f91ba0ee2501548199450e3463072b0fc47f77d828208"} Oct 03 17:15:25 crc kubenswrapper[5081]: I1003 17:15:25.865634 5081 scope.go:117] "RemoveContainer" containerID="bd45a2291b6a982f361416a8fa9d4261fd2dcc21edc6720d2067252d1c2c45a8" Oct 03 17:15:25 crc kubenswrapper[5081]: I1003 17:15:25.909865 5081 scope.go:117] "RemoveContainer" containerID="db66595cf36bc07d831ff8f3010834a7f223de38fc613bd88ae3a25417991442" Oct 03 17:15:25 crc kubenswrapper[5081]: I1003 17:15:25.990168 5081 scope.go:117] "RemoveContainer" containerID="93cddce6d79b185c4d62099eaefbc46316033a9853d46ee9c735461e460a29c1" Oct 03 17:15:26 crc kubenswrapper[5081]: I1003 17:15:26.487818 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1633625d-b110-403f-81cf-378b74105c5d","Type":"ContainerStarted","Data":"eb7899de5582f7273b4d2fb2ac4db87eb22611a74431cf56b57a69eb7add20cb"} Oct 03 17:15:26 crc kubenswrapper[5081]: I1003 17:15:26.629735 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 03 17:15:26 crc kubenswrapper[5081]: I1003 17:15:26.665226 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 03 17:15:26 crc kubenswrapper[5081]: I1003 17:15:26.745446 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Oct 03 17:15:28 crc kubenswrapper[5081]: I1003 17:15:28.510872 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1633625d-b110-403f-81cf-378b74105c5d","Type":"ContainerStarted","Data":"0c5012c7ce67ef7d7b037743302bee0a4a9d10f15156a5a141aca3a448d7f23a"} Oct 03 17:15:28 crc kubenswrapper[5081]: I1003 17:15:28.511377 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1633625d-b110-403f-81cf-378b74105c5d","Type":"ContainerStarted","Data":"e9d1c78196f5a5887688c4f5c170635b66e9ee89bda87b44dd820dc6b4c82731"} Oct 03 17:15:30 crc kubenswrapper[5081]: I1003 17:15:30.535533 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1633625d-b110-403f-81cf-378b74105c5d","Type":"ContainerStarted","Data":"37dd2092a09816d631cc15af2ebaeca1cd7d9c8bafcd726f4a9dc4b144c1476a"} Oct 03 17:15:30 crc kubenswrapper[5081]: I1003 17:15:30.536293 5081 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 17:15:30 crc kubenswrapper[5081]: I1003 17:15:30.565922 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.38896131 podStartE2EDuration="7.565903218s" podCreationTimestamp="2025-10-03 17:15:23 +0000 UTC" firstStartedPulling="2025-10-03 17:15:24.330865409 +0000 UTC m=+6443.296422022" lastFinishedPulling="2025-10-03 17:15:29.507807317 +0000 UTC m=+6448.473363930" observedRunningTime="2025-10-03 17:15:30.556586552 +0000 UTC m=+6449.522143185" watchObservedRunningTime="2025-10-03 17:15:30.565903218 +0000 UTC m=+6449.531459831" Oct 03 17:15:36 crc kubenswrapper[5081]: I1003 17:15:36.828451 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:15:36 crc kubenswrapper[5081]: E1003 17:15:36.829252 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:15:51 crc kubenswrapper[5081]: I1003 17:15:51.837651 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:15:51 crc kubenswrapper[5081]: E1003 17:15:51.838662 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:15:53 crc kubenswrapper[5081]: I1003 17:15:53.856920 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 17:16:04 crc kubenswrapper[5081]: I1003 17:16:04.828412 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:16:04 crc kubenswrapper[5081]: E1003 17:16:04.829748 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.767011 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.769721 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.775050 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.797160 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.878622 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26mws\" (UniqueName: \"kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.878683 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.879097 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.879143 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.879227 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.879311 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981061 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981099 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " 
pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981137 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981178 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981225 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26mws\" (UniqueName: \"kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.981249 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.982250 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.982305 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.982746 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.983087 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:12 crc kubenswrapper[5081]: I1003 17:16:12.983366 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.000394 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-26mws\" (UniqueName: \"kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws\") pod \"dnsmasq-dns-95586846f-5gcrc\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.100149 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.591193 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.993295 5081 generic.go:334] "Generic (PLEG): container finished" podID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerID="2421d228f7c1b26a3eaff56cb1c8c82f1827dafed6d8900b0e6c0342e74059bb" exitCode=0 Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.993354 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95586846f-5gcrc" event={"ID":"515f3a6e-bb62-48f9-b56a-ebda008e825e","Type":"ContainerDied","Data":"2421d228f7c1b26a3eaff56cb1c8c82f1827dafed6d8900b0e6c0342e74059bb"} Oct 03 17:16:13 crc kubenswrapper[5081]: I1003 17:16:13.993723 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95586846f-5gcrc" event={"ID":"515f3a6e-bb62-48f9-b56a-ebda008e825e","Type":"ContainerStarted","Data":"b334ce0bf8e37d17624b7aa828aa8e2f58e61ce2ccba59d0756f3893cd53c042"} Oct 03 17:16:15 crc kubenswrapper[5081]: I1003 17:16:15.005879 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95586846f-5gcrc" event={"ID":"515f3a6e-bb62-48f9-b56a-ebda008e825e","Type":"ContainerStarted","Data":"7fa3b69be2cfce7efa81b95cbc72806892929cf7ddb0e75a5961d90b46be2afe"} Oct 03 17:16:15 crc kubenswrapper[5081]: I1003 17:16:15.006216 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:15 crc kubenswrapper[5081]: I1003 17:16:15.030029 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95586846f-5gcrc" podStartSLOduration=3.030010958 podStartE2EDuration="3.030010958s" podCreationTimestamp="2025-10-03 17:16:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:16:15.02623311 +0000 UTC m=+6493.991789743" watchObservedRunningTime="2025-10-03 17:16:15.030010958 +0000 UTC m=+6493.995567571" Oct 03 17:16:17 crc kubenswrapper[5081]: I1003 17:16:17.828305 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:16:17 crc kubenswrapper[5081]: E1003 17:16:17.829117 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.101722 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.157531 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"] Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.158068 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="dnsmasq-dns" containerID="cri-o://39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab" gracePeriod=10 Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.289273 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66d4fbb967-2bmls"] Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.297276 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.307554 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d4fbb967-2bmls"] Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.431967 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-nb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.432449 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-dns-svc\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.432483 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-sb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.432520 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-openstack-cell1\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.432622 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnbt8\" (UniqueName: \"kubernetes.io/projected/36922327-95b1-465f-9628-2a9056e8f6b4-kube-api-access-nnbt8\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.432695 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-config\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.534857 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-dns-svc\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.534904 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-sb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.534935 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-openstack-cell1\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.535910 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-dns-svc\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.536013 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnbt8\" (UniqueName: \"kubernetes.io/projected/36922327-95b1-465f-9628-2a9056e8f6b4-kube-api-access-nnbt8\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.536094 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-config\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.536242 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-nb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.536966 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-nb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.537149 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-ovsdbserver-sb\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.537676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-openstack-cell1\") pod 
\"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.537773 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36922327-95b1-465f-9628-2a9056e8f6b4-config\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.596091 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnbt8\" (UniqueName: \"kubernetes.io/projected/36922327-95b1-465f-9628-2a9056e8f6b4-kube-api-access-nnbt8\") pod \"dnsmasq-dns-66d4fbb967-2bmls\" (UID: \"36922327-95b1-465f-9628-2a9056e8f6b4\") " pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.662731 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.835794 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.949288 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config\") pod \"e47fa986-ad2a-49fe-b568-84f14dc016bf\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.949329 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb\") pod \"e47fa986-ad2a-49fe-b568-84f14dc016bf\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.949495 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9dp4\" (UniqueName: \"kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4\") pod \"e47fa986-ad2a-49fe-b568-84f14dc016bf\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.949555 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb\") pod \"e47fa986-ad2a-49fe-b568-84f14dc016bf\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.949631 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc\") pod \"e47fa986-ad2a-49fe-b568-84f14dc016bf\" (UID: \"e47fa986-ad2a-49fe-b568-84f14dc016bf\") " Oct 03 17:16:23 crc kubenswrapper[5081]: I1003 17:16:23.967634 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4" (OuterVolumeSpecName: "kube-api-access-n9dp4") pod "e47fa986-ad2a-49fe-b568-84f14dc016bf" (UID: "e47fa986-ad2a-49fe-b568-84f14dc016bf"). InnerVolumeSpecName "kube-api-access-n9dp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.047850 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e47fa986-ad2a-49fe-b568-84f14dc016bf" (UID: "e47fa986-ad2a-49fe-b568-84f14dc016bf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.052959 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9dp4\" (UniqueName: \"kubernetes.io/projected/e47fa986-ad2a-49fe-b568-84f14dc016bf-kube-api-access-n9dp4\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.053000 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.058282 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e47fa986-ad2a-49fe-b568-84f14dc016bf" (UID: "e47fa986-ad2a-49fe-b568-84f14dc016bf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.096422 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config" (OuterVolumeSpecName: "config") pod "e47fa986-ad2a-49fe-b568-84f14dc016bf" (UID: "e47fa986-ad2a-49fe-b568-84f14dc016bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.119774 5081 generic.go:334] "Generic (PLEG): container finished" podID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerID="39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab" exitCode=0 Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.120065 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" event={"ID":"e47fa986-ad2a-49fe-b568-84f14dc016bf","Type":"ContainerDied","Data":"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab"} Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.120098 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" event={"ID":"e47fa986-ad2a-49fe-b568-84f14dc016bf","Type":"ContainerDied","Data":"054b9c15663a3e5aa2dc51d3e96deb3f861d7d32445ae39fd0437831a6a491c9"} Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.120152 5081 scope.go:117] "RemoveContainer" containerID="39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.120254 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d5b7d99b5-snzbt" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.134627 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e47fa986-ad2a-49fe-b568-84f14dc016bf" (UID: "e47fa986-ad2a-49fe-b568-84f14dc016bf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.154976 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.155006 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.155019 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e47fa986-ad2a-49fe-b568-84f14dc016bf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.155451 5081 scope.go:117] "RemoveContainer" containerID="2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.202579 5081 scope.go:117] "RemoveContainer" containerID="39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab" Oct 03 17:16:24 crc kubenswrapper[5081]: E1003 17:16:24.203274 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab\": container with ID starting with 39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab not found: ID does not exist" containerID="39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.203321 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab"} err="failed to get container status \"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab\": rpc error: code = NotFound desc = could not find container \"39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab\": container with ID starting with 39433ce2b6d66b5621d94fe7a2c4752b4b1977bbb5ad81d1f062bcfa002b71ab not found: ID does not exist" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.203374 5081 scope.go:117] "RemoveContainer" containerID="2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65" Oct 03 17:16:24 crc kubenswrapper[5081]: E1003 17:16:24.206363 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65\": container with ID starting with 2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65 not found: ID does not exist" containerID="2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.206433 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65"} err="failed to get container status \"2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65\": rpc error: code = NotFound desc = could not find container \"2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65\": container with ID starting with 2d43b4038945be558854f8c9f5be88cb9755a1616c76dbebe51d731e21ce1d65 not found: ID does not exist" Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.252886 5081 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d4fbb967-2bmls"] Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.497803 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"] Oct 03 17:16:24 crc kubenswrapper[5081]: I1003 17:16:24.511858 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d5b7d99b5-snzbt"] Oct 03 17:16:25 crc kubenswrapper[5081]: I1003 17:16:25.134250 5081 generic.go:334] "Generic (PLEG): container finished" podID="36922327-95b1-465f-9628-2a9056e8f6b4" containerID="14579e744e1bba94715a7a3ac4dee87362c15fe247af7dfa66d8213fd94ae4d5" exitCode=0 Oct 03 17:16:25 crc kubenswrapper[5081]: I1003 17:16:25.134457 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" event={"ID":"36922327-95b1-465f-9628-2a9056e8f6b4","Type":"ContainerDied","Data":"14579e744e1bba94715a7a3ac4dee87362c15fe247af7dfa66d8213fd94ae4d5"} Oct 03 17:16:25 crc kubenswrapper[5081]: I1003 17:16:25.134752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" event={"ID":"36922327-95b1-465f-9628-2a9056e8f6b4","Type":"ContainerStarted","Data":"0db3622401b6051d455551d3606ef90f7af51bb8bd34e53f1b8ae222d52d0b2d"} Oct 03 17:16:25 crc kubenswrapper[5081]: I1003 17:16:25.840240 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" path="/var/lib/kubelet/pods/e47fa986-ad2a-49fe-b568-84f14dc016bf/volumes" Oct 03 17:16:26 crc kubenswrapper[5081]: I1003 17:16:26.148133 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" event={"ID":"36922327-95b1-465f-9628-2a9056e8f6b4","Type":"ContainerStarted","Data":"99f390a8bed41dec0b403872c2961e665c6bc630c328f60ad80b8d62089224bd"} Oct 03 17:16:26 crc kubenswrapper[5081]: I1003 17:16:26.149144 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:26 crc kubenswrapper[5081]: I1003 17:16:26.432483 5081 scope.go:117] "RemoveContainer" containerID="f1e67e11cb2083a24dc47a97cc4dbdc3f57c172c484a755f58ac6757f43d61ab" Oct 03 17:16:26 crc kubenswrapper[5081]: I1003 17:16:26.451969 5081 scope.go:117] "RemoveContainer" containerID="1f6700a73c927a287d9702cafb175792c22e14e4023b038774dbcca8cfa16946" Oct 03 17:16:30 crc kubenswrapper[5081]: I1003 17:16:30.827408 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:16:30 crc kubenswrapper[5081]: E1003 17:16:30.828239 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:16:33 crc kubenswrapper[5081]: I1003 17:16:33.665896 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" Oct 03 17:16:33 crc kubenswrapper[5081]: I1003 17:16:33.690078 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66d4fbb967-2bmls" podStartSLOduration=10.690056248 podStartE2EDuration="10.690056248s" podCreationTimestamp="2025-10-03 17:16:23 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:16:26.167309459 +0000 UTC m=+6505.132866082" watchObservedRunningTime="2025-10-03 17:16:33.690056248 +0000 UTC m=+6512.655612861" Oct 03 17:16:33 crc kubenswrapper[5081]: I1003 17:16:33.733890 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:33 crc kubenswrapper[5081]: I1003 17:16:33.734153 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95586846f-5gcrc" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="dnsmasq-dns" containerID="cri-o://7fa3b69be2cfce7efa81b95cbc72806892929cf7ddb0e75a5961d90b46be2afe" gracePeriod=10 Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.222178 5081 generic.go:334] "Generic (PLEG): container finished" podID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerID="7fa3b69be2cfce7efa81b95cbc72806892929cf7ddb0e75a5961d90b46be2afe" exitCode=0 Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.222223 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95586846f-5gcrc" event={"ID":"515f3a6e-bb62-48f9-b56a-ebda008e825e","Type":"ContainerDied","Data":"7fa3b69be2cfce7efa81b95cbc72806892929cf7ddb0e75a5961d90b46be2afe"} Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.222792 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95586846f-5gcrc" event={"ID":"515f3a6e-bb62-48f9-b56a-ebda008e825e","Type":"ContainerDied","Data":"b334ce0bf8e37d17624b7aa828aa8e2f58e61ce2ccba59d0756f3893cd53c042"} Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.222812 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b334ce0bf8e37d17624b7aa828aa8e2f58e61ce2ccba59d0756f3893cd53c042" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.255813 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371279 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371376 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371475 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26mws\" (UniqueName: \"kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371606 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371629 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.371715 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc\") pod \"515f3a6e-bb62-48f9-b56a-ebda008e825e\" (UID: \"515f3a6e-bb62-48f9-b56a-ebda008e825e\") " Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.376598 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws" (OuterVolumeSpecName: "kube-api-access-26mws") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "kube-api-access-26mws". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.428213 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.429466 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.431183 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config" (OuterVolumeSpecName: "config") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.441266 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.441692 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "515f3a6e-bb62-48f9-b56a-ebda008e825e" (UID: "515f3a6e-bb62-48f9-b56a-ebda008e825e"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474469 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474516 5081 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474530 5081 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474542 5081 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-config\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474553 5081 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/515f3a6e-bb62-48f9-b56a-ebda008e825e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:34 crc kubenswrapper[5081]: I1003 17:16:34.474582 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26mws\" (UniqueName: \"kubernetes.io/projected/515f3a6e-bb62-48f9-b56a-ebda008e825e-kube-api-access-26mws\") on node \"crc\" DevicePath \"\"" Oct 03 17:16:35 crc kubenswrapper[5081]: I1003 17:16:35.233232 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95586846f-5gcrc" Oct 03 17:16:35 crc kubenswrapper[5081]: I1003 17:16:35.264766 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:35 crc kubenswrapper[5081]: I1003 17:16:35.272434 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95586846f-5gcrc"] Oct 03 17:16:35 crc kubenswrapper[5081]: I1003 17:16:35.839197 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" path="/var/lib/kubelet/pods/515f3a6e-bb62-48f9-b56a-ebda008e825e/volumes" Oct 03 17:16:41 crc kubenswrapper[5081]: I1003 17:16:41.835176 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:16:41 crc kubenswrapper[5081]: E1003 17:16:41.836011 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.480781 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d"] Oct 03 17:16:44 crc kubenswrapper[5081]: E1003 17:16:44.481613 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="init" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.481633 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="init" Oct 03 17:16:44 crc kubenswrapper[5081]: E1003 17:16:44.481644 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.481652 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: E1003 17:16:44.481683 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.481692 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: E1003 17:16:44.481738 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="init" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.481747 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="init" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.482933 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="515f3a6e-bb62-48f9-b56a-ebda008e825e" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.482978 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e47fa986-ad2a-49fe-b568-84f14dc016bf" containerName="dnsmasq-dns" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.484225 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.488951 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.489033 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.489062 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.491372 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.496519 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d"] Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.586336 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.586603 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.586674 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jhmh\" (UniqueName: \"kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.586806 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.587273 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.689266 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.689382 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.689421 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jhmh\" (UniqueName: \"kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.689447 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.689623 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.695797 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.696784 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.698453 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc 
kubenswrapper[5081]: I1003 17:16:44.702747 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.706764 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jhmh\" (UniqueName: \"kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:44 crc kubenswrapper[5081]: I1003 17:16:44.812491 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:16:45 crc kubenswrapper[5081]: I1003 17:16:45.351778 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d"] Oct 03 17:16:45 crc kubenswrapper[5081]: W1003 17:16:45.354041 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5de866ec_f278_4793_953c_7fdd64f008d7.slice/crio-330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e WatchSource:0}: Error finding container 330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e: Status 404 returned error can't find the container with id 330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e Oct 03 17:16:46 crc kubenswrapper[5081]: I1003 17:16:46.358779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" event={"ID":"5de866ec-f278-4793-953c-7fdd64f008d7","Type":"ContainerStarted","Data":"330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e"} Oct 03 17:16:53 crc kubenswrapper[5081]: I1003 17:16:53.828433 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:16:53 crc kubenswrapper[5081]: E1003 17:16:53.829240 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:16:54 crc kubenswrapper[5081]: I1003 17:16:54.427134 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" event={"ID":"5de866ec-f278-4793-953c-7fdd64f008d7","Type":"ContainerStarted","Data":"3c30d10674819f350a916d5b9d2b008015c748939b8df15a9d3eb4d2292ca51a"} Oct 03 17:16:54 crc kubenswrapper[5081]: I1003 17:16:54.455374 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" podStartSLOduration=2.3892686 podStartE2EDuration="10.455357118s" podCreationTimestamp="2025-10-03 17:16:44 +0000 UTC" 
firstStartedPulling="2025-10-03 17:16:45.356805662 +0000 UTC m=+6524.322362285" lastFinishedPulling="2025-10-03 17:16:53.42289419 +0000 UTC m=+6532.388450803" observedRunningTime="2025-10-03 17:16:54.442767547 +0000 UTC m=+6533.408324160" watchObservedRunningTime="2025-10-03 17:16:54.455357118 +0000 UTC m=+6533.420913731" Oct 03 17:17:04 crc kubenswrapper[5081]: I1003 17:17:04.827421 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:17:04 crc kubenswrapper[5081]: E1003 17:17:04.828427 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:17:06 crc kubenswrapper[5081]: I1003 17:17:06.550889 5081 generic.go:334] "Generic (PLEG): container finished" podID="5de866ec-f278-4793-953c-7fdd64f008d7" containerID="3c30d10674819f350a916d5b9d2b008015c748939b8df15a9d3eb4d2292ca51a" exitCode=0 Oct 03 17:17:06 crc kubenswrapper[5081]: I1003 17:17:06.551091 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" event={"ID":"5de866ec-f278-4793-953c-7fdd64f008d7","Type":"ContainerDied","Data":"3c30d10674819f350a916d5b9d2b008015c748939b8df15a9d3eb4d2292ca51a"} Oct 03 17:17:07 crc kubenswrapper[5081]: I1003 17:17:07.997414 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.113199 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory\") pod \"5de866ec-f278-4793-953c-7fdd64f008d7\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.113373 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jhmh\" (UniqueName: \"kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh\") pod \"5de866ec-f278-4793-953c-7fdd64f008d7\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.113412 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle\") pod \"5de866ec-f278-4793-953c-7fdd64f008d7\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.113503 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph\") pod \"5de866ec-f278-4793-953c-7fdd64f008d7\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.113696 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key\") pod 
\"5de866ec-f278-4793-953c-7fdd64f008d7\" (UID: \"5de866ec-f278-4793-953c-7fdd64f008d7\") " Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.121630 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "5de866ec-f278-4793-953c-7fdd64f008d7" (UID: "5de866ec-f278-4793-953c-7fdd64f008d7"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.122747 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph" (OuterVolumeSpecName: "ceph") pod "5de866ec-f278-4793-953c-7fdd64f008d7" (UID: "5de866ec-f278-4793-953c-7fdd64f008d7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.124092 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh" (OuterVolumeSpecName: "kube-api-access-2jhmh") pod "5de866ec-f278-4793-953c-7fdd64f008d7" (UID: "5de866ec-f278-4793-953c-7fdd64f008d7"). InnerVolumeSpecName "kube-api-access-2jhmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.174454 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5de866ec-f278-4793-953c-7fdd64f008d7" (UID: "5de866ec-f278-4793-953c-7fdd64f008d7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.181922 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory" (OuterVolumeSpecName: "inventory") pod "5de866ec-f278-4793-953c-7fdd64f008d7" (UID: "5de866ec-f278-4793-953c-7fdd64f008d7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.216863 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jhmh\" (UniqueName: \"kubernetes.io/projected/5de866ec-f278-4793-953c-7fdd64f008d7-kube-api-access-2jhmh\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.216906 5081 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.216923 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.216936 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.216949 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5de866ec-f278-4793-953c-7fdd64f008d7-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.589793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" event={"ID":"5de866ec-f278-4793-953c-7fdd64f008d7","Type":"ContainerDied","Data":"330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e"} Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.590143 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="330e3a1812129643da36afdae11f62f94eeeb6bf92f3f68a416ca22dafd8af7e" Oct 03 17:17:08 crc kubenswrapper[5081]: I1003 17:17:08.590387 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.137276 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:12 crc kubenswrapper[5081]: E1003 17:17:12.138250 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de866ec-f278-4793-953c-7fdd64f008d7" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.138269 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de866ec-f278-4793-953c-7fdd64f008d7" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.138537 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de866ec-f278-4793-953c-7fdd64f008d7" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.140448 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.149657 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.195596 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jmk4\" (UniqueName: \"kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.195842 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.195887 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.297440 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.297480 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.297594 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jmk4\" (UniqueName: \"kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.298587 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.298801 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.322456 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8jmk4\" (UniqueName: \"kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4\") pod \"community-operators-6mvss\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.461398 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:12 crc kubenswrapper[5081]: I1003 17:17:12.995484 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:13 crc kubenswrapper[5081]: I1003 17:17:13.654368 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerID="da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda" exitCode=0 Oct 03 17:17:13 crc kubenswrapper[5081]: I1003 17:17:13.654471 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerDied","Data":"da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda"} Oct 03 17:17:13 crc kubenswrapper[5081]: I1003 17:17:13.654660 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerStarted","Data":"479c07acb52879c5411b3b390504da927f851e19f43caff73f7c4c320745f33a"} Oct 03 17:17:14 crc kubenswrapper[5081]: I1003 17:17:14.666322 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerStarted","Data":"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952"} Oct 03 17:17:15 crc kubenswrapper[5081]: I1003 17:17:15.675551 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerID="ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952" exitCode=0 Oct 03 17:17:15 crc kubenswrapper[5081]: I1003 17:17:15.675616 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerDied","Data":"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952"} Oct 03 17:17:16 crc kubenswrapper[5081]: I1003 17:17:16.034593 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-mql6s"] Oct 03 17:17:16 crc kubenswrapper[5081]: I1003 17:17:16.043818 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-mql6s"] Oct 03 17:17:16 crc kubenswrapper[5081]: I1003 17:17:16.687523 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerStarted","Data":"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5"} Oct 03 17:17:16 crc kubenswrapper[5081]: I1003 17:17:16.720607 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6mvss" podStartSLOduration=2.195939461 podStartE2EDuration="4.720585982s" podCreationTimestamp="2025-10-03 17:17:12 +0000 UTC" firstStartedPulling="2025-10-03 17:17:13.657063988 +0000 UTC m=+6552.622620601" lastFinishedPulling="2025-10-03 
Oct 03 17:17:16 crc kubenswrapper[5081]: I1003 17:17:16.720607 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6mvss" podStartSLOduration=2.195939461 podStartE2EDuration="4.720585982s" podCreationTimestamp="2025-10-03 17:17:12 +0000 UTC" firstStartedPulling="2025-10-03 17:17:13.657063988 +0000 UTC m=+6552.622620601" lastFinishedPulling="2025-10-03 17:17:16.181710509 +0000 UTC m=+6555.147267122" observedRunningTime="2025-10-03 17:17:16.715925649 +0000 UTC m=+6555.681482282" watchObservedRunningTime="2025-10-03 17:17:16.720585982 +0000 UTC m=+6555.686142595"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.299035 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"]
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.300744 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.305152 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.305254 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.305438 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.305795 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.321394 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"]
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.412726 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.412835 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.412983 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.413017 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.413052 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8z8t\" (UniqueName: \"kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.514987 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.515108 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.515138 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.515173 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8z8t\" (UniqueName: \"kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.515278 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.522404 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.523048 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.526834 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.536959 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.539879 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8z8t\" (UniqueName: \"kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.625307 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.828166 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f"
Oct 03 17:17:17 crc kubenswrapper[5081]: E1003 17:17:17.828991 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:17:17 crc kubenswrapper[5081]: I1003 17:17:17.847773 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7" path="/var/lib/kubelet/pods/5cd884cb-d4ea-4d6d-acfe-f6483bfe9fb7/volumes"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.220651 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9"]
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.729054 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" event={"ID":"bd0f854a-61ae-40aa-b100-ae4918c3dcea","Type":"ContainerStarted","Data":"7046e8700af0bfef962dad6ff6e0a8aa347f1f17006503ab59090f8307a9831f"}
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.742607 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-flcbk"]
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.749301 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.755329 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-flcbk"]
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.848544 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.848810 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.849011 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8m8m\" (UniqueName: \"kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.951069 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.951178 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.951275 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8m8m\" (UniqueName: \"kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.951543 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.951915 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:18 crc kubenswrapper[5081]: I1003 17:17:18.971901 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8m8m\" (UniqueName: \"kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m\") pod \"certified-operators-flcbk\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:19 crc kubenswrapper[5081]: I1003 17:17:19.165847 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-flcbk"
Oct 03 17:17:19 crc kubenswrapper[5081]: I1003 17:17:19.742634 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" event={"ID":"bd0f854a-61ae-40aa-b100-ae4918c3dcea","Type":"ContainerStarted","Data":"cbf77889376ccc42f698c738fa7341b2143547c4c37a4ac24899e5942e2cf115"}
Oct 03 17:17:19 crc kubenswrapper[5081]: I1003 17:17:19.742955 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-flcbk"]
Oct 03 17:17:19 crc kubenswrapper[5081]: W1003 17:17:19.747578 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda40e1426_8d4d_4474_af37_3a76dddc47cf.slice/crio-d35d32c8b8c6b1186a3466088e2682a04ef59ee6b8cd2e2c43f70c1cb6e66228 WatchSource:0}: Error finding container d35d32c8b8c6b1186a3466088e2682a04ef59ee6b8cd2e2c43f70c1cb6e66228: Status 404 returned error can't find the container with id d35d32c8b8c6b1186a3466088e2682a04ef59ee6b8cd2e2c43f70c1cb6e66228
Oct 03 17:17:19 crc kubenswrapper[5081]: I1003 17:17:19.762685 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" podStartSLOduration=2.309926765 podStartE2EDuration="2.762662011s" podCreationTimestamp="2025-10-03 17:17:17 +0000 UTC" firstStartedPulling="2025-10-03 17:17:18.216968965 +0000 UTC m=+6557.182525588" lastFinishedPulling="2025-10-03 17:17:18.669704211 +0000 UTC m=+6557.635260834" observedRunningTime="2025-10-03 17:17:19.760054226 +0000 UTC m=+6558.725610849" watchObservedRunningTime="2025-10-03 17:17:19.762662011 +0000 UTC m=+6558.728218624"
Oct 03 17:17:20 crc kubenswrapper[5081]: I1003 17:17:20.761165 5081 generic.go:334] "Generic (PLEG): container finished" podID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerID="3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6" exitCode=0
Oct 03 17:17:20 crc kubenswrapper[5081]: I1003 17:17:20.761618 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerDied","Data":"3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6"}
Oct 03 17:17:20 crc kubenswrapper[5081]: I1003 17:17:20.761660 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerStarted","Data":"d35d32c8b8c6b1186a3466088e2682a04ef59ee6b8cd2e2c43f70c1cb6e66228"}
Oct 03 17:17:21 crc kubenswrapper[5081]: I1003 17:17:21.772170 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerStarted","Data":"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767"}
status="unhealthy" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:22 crc kubenswrapper[5081]: I1003 17:17:22.462742 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:22 crc kubenswrapper[5081]: I1003 17:17:22.532342 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:22 crc kubenswrapper[5081]: I1003 17:17:22.840703 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:23 crc kubenswrapper[5081]: I1003 17:17:23.797005 5081 generic.go:334] "Generic (PLEG): container finished" podID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerID="788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767" exitCode=0 Oct 03 17:17:23 crc kubenswrapper[5081]: I1003 17:17:23.797086 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerDied","Data":"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767"} Oct 03 17:17:24 crc kubenswrapper[5081]: I1003 17:17:24.129861 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:24 crc kubenswrapper[5081]: I1003 17:17:24.830573 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerStarted","Data":"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7"} Oct 03 17:17:24 crc kubenswrapper[5081]: I1003 17:17:24.830691 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6mvss" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="registry-server" containerID="cri-o://8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5" gracePeriod=2 Oct 03 17:17:24 crc kubenswrapper[5081]: I1003 17:17:24.861603 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-flcbk" podStartSLOduration=3.438328167 podStartE2EDuration="6.861584444s" podCreationTimestamp="2025-10-03 17:17:18 +0000 UTC" firstStartedPulling="2025-10-03 17:17:20.768870556 +0000 UTC m=+6559.734427169" lastFinishedPulling="2025-10-03 17:17:24.192126833 +0000 UTC m=+6563.157683446" observedRunningTime="2025-10-03 17:17:24.858862596 +0000 UTC m=+6563.824419229" watchObservedRunningTime="2025-10-03 17:17:24.861584444 +0000 UTC m=+6563.827141057" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.440364 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.519345 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content\") pod \"eb47c34d-7dd9-4589-9346-35a3a3130493\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.519484 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jmk4\" (UniqueName: \"kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4\") pod \"eb47c34d-7dd9-4589-9346-35a3a3130493\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.519638 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities\") pod \"eb47c34d-7dd9-4589-9346-35a3a3130493\" (UID: \"eb47c34d-7dd9-4589-9346-35a3a3130493\") " Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.520952 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities" (OuterVolumeSpecName: "utilities") pod "eb47c34d-7dd9-4589-9346-35a3a3130493" (UID: "eb47c34d-7dd9-4589-9346-35a3a3130493"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.529204 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4" (OuterVolumeSpecName: "kube-api-access-8jmk4") pod "eb47c34d-7dd9-4589-9346-35a3a3130493" (UID: "eb47c34d-7dd9-4589-9346-35a3a3130493"). InnerVolumeSpecName "kube-api-access-8jmk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.586636 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb47c34d-7dd9-4589-9346-35a3a3130493" (UID: "eb47c34d-7dd9-4589-9346-35a3a3130493"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.622265 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jmk4\" (UniqueName: \"kubernetes.io/projected/eb47c34d-7dd9-4589-9346-35a3a3130493-kube-api-access-8jmk4\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.622311 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.622322 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb47c34d-7dd9-4589-9346-35a3a3130493-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.859375 5081 generic.go:334] "Generic (PLEG): container finished" podID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerID="8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5" exitCode=0 Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.859715 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerDied","Data":"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5"} Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.859738 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6mvss" event={"ID":"eb47c34d-7dd9-4589-9346-35a3a3130493","Type":"ContainerDied","Data":"479c07acb52879c5411b3b390504da927f851e19f43caff73f7c4c320745f33a"} Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.859755 5081 scope.go:117] "RemoveContainer" containerID="8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.859871 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6mvss" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.895278 5081 scope.go:117] "RemoveContainer" containerID="ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.903345 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.914253 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6mvss"] Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.936432 5081 scope.go:117] "RemoveContainer" containerID="da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.991340 5081 scope.go:117] "RemoveContainer" containerID="8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5" Oct 03 17:17:25 crc kubenswrapper[5081]: E1003 17:17:25.991840 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5\": container with ID starting with 8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5 not found: ID does not exist" containerID="8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.991874 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5"} err="failed to get container status \"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5\": rpc error: code = NotFound desc = could not find container \"8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5\": container with ID starting with 8d3e2e8ae35e809a8842e485f1dca6de2172ccf0a5ac528d490ae8a98ddb5bb5 not found: ID does not exist" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.991897 5081 scope.go:117] "RemoveContainer" containerID="ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952" Oct 03 17:17:25 crc kubenswrapper[5081]: E1003 17:17:25.992098 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952\": container with ID starting with ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952 not found: ID does not exist" containerID="ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.992125 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952"} err="failed to get container status \"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952\": rpc error: code = NotFound desc = could not find container \"ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952\": container with ID starting with ef4a3e85e5c0e8ce535fce9e6df3ba62629660d96a70030c2710e368a6ecd952 not found: ID does not exist" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.992140 5081 scope.go:117] "RemoveContainer" containerID="da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda" Oct 03 17:17:25 crc kubenswrapper[5081]: E1003 17:17:25.992632 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda\": container with ID starting with da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda not found: ID does not exist" containerID="da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda" Oct 03 17:17:25 crc kubenswrapper[5081]: I1003 17:17:25.992737 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda"} err="failed to get container status \"da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda\": rpc error: code = NotFound desc = could not find container \"da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda\": container with ID starting with da86ab6c106562c059d9480f9248828f4776bde81f149c13175b4ab1da698eda not found: ID does not exist" Oct 03 17:17:26 crc kubenswrapper[5081]: I1003 17:17:26.682258 5081 scope.go:117] "RemoveContainer" containerID="685d642eccea0bd6502038733fb97ef85cefea018794e17b25aa89e6f1f9ebfe" Oct 03 17:17:27 crc kubenswrapper[5081]: I1003 17:17:27.840052 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" path="/var/lib/kubelet/pods/eb47c34d-7dd9-4589-9346-35a3a3130493/volumes" Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.040492 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-3907-account-create-7fvgs"] Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.051830 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-3907-account-create-7fvgs"] Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.166910 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.166950 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.220666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.846422 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a742149-dad8-4a69-a1ba-ec38349e2bf7" path="/var/lib/kubelet/pods/2a742149-dad8-4a69-a1ba-ec38349e2bf7/volumes" Oct 03 17:17:29 crc kubenswrapper[5081]: I1003 17:17:29.966666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:30 crc kubenswrapper[5081]: I1003 17:17:30.331808 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-flcbk"] Oct 03 17:17:31 crc kubenswrapper[5081]: I1003 17:17:31.936384 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-flcbk" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="registry-server" containerID="cri-o://334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7" gracePeriod=2 Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.462164 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.495052 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities\") pod \"a40e1426-8d4d-4474-af37-3a76dddc47cf\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.495119 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8m8m\" (UniqueName: \"kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m\") pod \"a40e1426-8d4d-4474-af37-3a76dddc47cf\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.495158 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content\") pod \"a40e1426-8d4d-4474-af37-3a76dddc47cf\" (UID: \"a40e1426-8d4d-4474-af37-3a76dddc47cf\") " Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.496777 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities" (OuterVolumeSpecName: "utilities") pod "a40e1426-8d4d-4474-af37-3a76dddc47cf" (UID: "a40e1426-8d4d-4474-af37-3a76dddc47cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.502973 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m" (OuterVolumeSpecName: "kube-api-access-m8m8m") pod "a40e1426-8d4d-4474-af37-3a76dddc47cf" (UID: "a40e1426-8d4d-4474-af37-3a76dddc47cf"). InnerVolumeSpecName "kube-api-access-m8m8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.575502 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a40e1426-8d4d-4474-af37-3a76dddc47cf" (UID: "a40e1426-8d4d-4474-af37-3a76dddc47cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.599069 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.599155 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8m8m\" (UniqueName: \"kubernetes.io/projected/a40e1426-8d4d-4474-af37-3a76dddc47cf-kube-api-access-m8m8m\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.599172 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a40e1426-8d4d-4474-af37-3a76dddc47cf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.828294 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:17:32 crc kubenswrapper[5081]: E1003 17:17:32.828713 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.946848 5081 generic.go:334] "Generic (PLEG): container finished" podID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerID="334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7" exitCode=0 Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.946892 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerDied","Data":"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7"} Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.946917 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flcbk" event={"ID":"a40e1426-8d4d-4474-af37-3a76dddc47cf","Type":"ContainerDied","Data":"d35d32c8b8c6b1186a3466088e2682a04ef59ee6b8cd2e2c43f70c1cb6e66228"} Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.946931 5081 scope.go:117] "RemoveContainer" containerID="334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.946933 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-flcbk" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.972061 5081 scope.go:117] "RemoveContainer" containerID="788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767" Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.983944 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-flcbk"] Oct 03 17:17:32 crc kubenswrapper[5081]: I1003 17:17:32.992810 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-flcbk"] Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.019520 5081 scope.go:117] "RemoveContainer" containerID="3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.049273 5081 scope.go:117] "RemoveContainer" containerID="334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7" Oct 03 17:17:33 crc kubenswrapper[5081]: E1003 17:17:33.049690 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7\": container with ID starting with 334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7 not found: ID does not exist" containerID="334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.049761 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7"} err="failed to get container status \"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7\": rpc error: code = NotFound desc = could not find container \"334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7\": container with ID starting with 334a02ff5ea63616d66f4f2be20c509d3bbc25254f78b53f63af618d12da9fc7 not found: ID does not exist" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.049846 5081 scope.go:117] "RemoveContainer" containerID="788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767" Oct 03 17:17:33 crc kubenswrapper[5081]: E1003 17:17:33.050181 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767\": container with ID starting with 788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767 not found: ID does not exist" containerID="788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.050238 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767"} err="failed to get container status \"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767\": rpc error: code = NotFound desc = could not find container \"788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767\": container with ID starting with 788c1b05aa67920874479d4636351f052bfd9074e8a3b49581f17efcebf25767 not found: ID does not exist" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.050256 5081 scope.go:117] "RemoveContainer" containerID="3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6" Oct 03 17:17:33 crc kubenswrapper[5081]: E1003 17:17:33.050721 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6\": container with ID starting with 3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6 not found: ID does not exist" containerID="3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.050749 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6"} err="failed to get container status \"3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6\": rpc error: code = NotFound desc = could not find container \"3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6\": container with ID starting with 3735cf1ab8ad5270f5c2369f96d16607ece23c92e3a180e0bc36f2bc3dd7c9d6 not found: ID does not exist" Oct 03 17:17:33 crc kubenswrapper[5081]: I1003 17:17:33.846381 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" path="/var/lib/kubelet/pods/a40e1426-8d4d-4474-af37-3a76dddc47cf/volumes" Oct 03 17:17:36 crc kubenswrapper[5081]: I1003 17:17:36.035995 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-mpj7m"] Oct 03 17:17:36 crc kubenswrapper[5081]: I1003 17:17:36.049217 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-mpj7m"] Oct 03 17:17:37 crc kubenswrapper[5081]: I1003 17:17:37.846844 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0e8a17-18fb-41f0-b2b5-8a5bccb86233" path="/var/lib/kubelet/pods/4e0e8a17-18fb-41f0-b2b5-8a5bccb86233/volumes" Oct 03 17:17:45 crc kubenswrapper[5081]: I1003 17:17:45.828272 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:17:45 crc kubenswrapper[5081]: E1003 17:17:45.830040 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:17:47 crc kubenswrapper[5081]: I1003 17:17:47.035265 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-198a-account-create-4rxzm"] Oct 03 17:17:47 crc kubenswrapper[5081]: I1003 17:17:47.045518 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-198a-account-create-4rxzm"] Oct 03 17:17:47 crc kubenswrapper[5081]: I1003 17:17:47.840733 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c" path="/var/lib/kubelet/pods/7a4ef053-58bb-4fd0-bf6f-8a9873a21c7c/volumes" Oct 03 17:17:57 crc kubenswrapper[5081]: I1003 17:17:57.827872 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:17:57 crc kubenswrapper[5081]: E1003 17:17:57.828677 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:18:12 crc kubenswrapper[5081]: I1003 17:18:12.828209 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:18:12 crc kubenswrapper[5081]: E1003 17:18:12.829152 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:18:19 crc kubenswrapper[5081]: I1003 17:18:19.047391 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-tzr4h"] Oct 03 17:18:19 crc kubenswrapper[5081]: I1003 17:18:19.058818 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-tzr4h"] Oct 03 17:18:19 crc kubenswrapper[5081]: I1003 17:18:19.842816 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a96f5ea-1a87-469f-9b6d-0c2501e89bcf" path="/var/lib/kubelet/pods/2a96f5ea-1a87-469f-9b6d-0c2501e89bcf/volumes" Oct 03 17:18:23 crc kubenswrapper[5081]: I1003 17:18:23.829200 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:18:23 crc kubenswrapper[5081]: E1003 17:18:23.829909 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:18:26 crc kubenswrapper[5081]: I1003 17:18:26.773872 5081 scope.go:117] "RemoveContainer" containerID="c72350e52513b82f61af0606751a0e0dfe1a9543042b94facb5cd2041f796654" Oct 03 17:18:26 crc kubenswrapper[5081]: I1003 17:18:26.811328 5081 scope.go:117] "RemoveContainer" containerID="4b3034e38b1adf6d3ff1696a40dba4cbbda629e6536ed4553040cbe82215cfe7" Oct 03 17:18:26 crc kubenswrapper[5081]: I1003 17:18:26.863093 5081 scope.go:117] "RemoveContainer" containerID="e34204778cb158e84b7ff6d9ff701561ff4d565956ef51329080d9d79c771e3e" Oct 03 17:18:26 crc kubenswrapper[5081]: I1003 17:18:26.920561 5081 scope.go:117] "RemoveContainer" containerID="aed667435271dab12f1496ec369cd0cb153f08434c483ae900f4f34c92ba0330" Oct 03 17:18:26 crc kubenswrapper[5081]: I1003 17:18:26.966747 5081 scope.go:117] "RemoveContainer" containerID="87bf62b8a7f2de6fb195a94c1444e7c73335244576c9f418c15bb8983b9f6224" Oct 03 17:18:36 crc kubenswrapper[5081]: I1003 17:18:36.827735 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:18:36 crc kubenswrapper[5081]: E1003 17:18:36.828472 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:18:50 crc kubenswrapper[5081]: I1003 17:18:50.828962 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:18:50 crc kubenswrapper[5081]: E1003 17:18:50.830357 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:19:02 crc kubenswrapper[5081]: I1003 17:19:02.828995 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:19:02 crc kubenswrapper[5081]: E1003 17:19:02.829866 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:19:17 crc kubenswrapper[5081]: I1003 17:19:17.828091 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:19:17 crc kubenswrapper[5081]: E1003 17:19:17.830091 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:19:28 crc kubenswrapper[5081]: I1003 17:19:28.828190 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:19:28 crc kubenswrapper[5081]: E1003 17:19:28.829089 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:19:42 crc kubenswrapper[5081]: I1003 17:19:42.827554 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:19:43 crc kubenswrapper[5081]: I1003 17:19:43.242581 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04"} Oct 03 17:21:28 crc kubenswrapper[5081]: I1003 17:21:28.055277 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/heat-db-create-k4jsb"] Oct 03 17:21:28 crc kubenswrapper[5081]: I1003 17:21:28.064730 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-k4jsb"] Oct 03 17:21:29 crc kubenswrapper[5081]: I1003 17:21:29.843136 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09af7c8f-dcd4-4379-905e-ecf7234f9432" path="/var/lib/kubelet/pods/09af7c8f-dcd4-4379-905e-ecf7234f9432/volumes" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.354474 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355498 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355513 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355546 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="extract-content" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355552 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="extract-content" Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355589 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355596 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355611 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="extract-utilities" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355619 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="extract-utilities" Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355643 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="extract-content" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355650 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="extract-content" Oct 03 17:21:37 crc kubenswrapper[5081]: E1003 17:21:37.355664 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="extract-utilities" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355671 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="extract-utilities" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355863 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb47c34d-7dd9-4589-9346-35a3a3130493" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.355893 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a40e1426-8d4d-4474-af37-3a76dddc47cf" containerName="registry-server" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.357420 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.364757 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.473612 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzz5j\" (UniqueName: \"kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.473681 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.473968 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.576848 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.577058 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzz5j\" (UniqueName: \"kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.577100 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.577818 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.577864 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.601869 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xzz5j\" (UniqueName: \"kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j\") pod \"redhat-marketplace-mjt6v\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:37 crc kubenswrapper[5081]: I1003 17:21:37.678784 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:38 crc kubenswrapper[5081]: I1003 17:21:38.054078 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-6524-account-create-92zkv"] Oct 03 17:21:38 crc kubenswrapper[5081]: I1003 17:21:38.067776 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-6524-account-create-92zkv"] Oct 03 17:21:38 crc kubenswrapper[5081]: I1003 17:21:38.140421 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:38 crc kubenswrapper[5081]: I1003 17:21:38.328698 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerStarted","Data":"cc05b7d93d804836bd5302fd74fec9730b229466ad40a38c80ce4c5ef3588497"} Oct 03 17:21:39 crc kubenswrapper[5081]: I1003 17:21:39.340432 5081 generic.go:334] "Generic (PLEG): container finished" podID="b6364e11-1158-4877-b0d1-06d056ba5242" containerID="5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4" exitCode=0 Oct 03 17:21:39 crc kubenswrapper[5081]: I1003 17:21:39.340487 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerDied","Data":"5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4"} Oct 03 17:21:39 crc kubenswrapper[5081]: I1003 17:21:39.344050 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:21:39 crc kubenswrapper[5081]: I1003 17:21:39.840039 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65e88ab0-0172-4a1a-82ca-3549c445210a" path="/var/lib/kubelet/pods/65e88ab0-0172-4a1a-82ca-3549c445210a/volumes" Oct 03 17:21:40 crc kubenswrapper[5081]: I1003 17:21:40.356321 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerStarted","Data":"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008"} Oct 03 17:21:41 crc kubenswrapper[5081]: I1003 17:21:41.369306 5081 generic.go:334] "Generic (PLEG): container finished" podID="b6364e11-1158-4877-b0d1-06d056ba5242" containerID="a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008" exitCode=0 Oct 03 17:21:41 crc kubenswrapper[5081]: I1003 17:21:41.369360 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerDied","Data":"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008"} Oct 03 17:21:42 crc kubenswrapper[5081]: I1003 17:21:42.397039 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerStarted","Data":"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8"} Oct 03 17:21:42 crc 
kubenswrapper[5081]: I1003 17:21:42.441795 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mjt6v" podStartSLOduration=2.956597812 podStartE2EDuration="5.441772015s" podCreationTimestamp="2025-10-03 17:21:37 +0000 UTC" firstStartedPulling="2025-10-03 17:21:39.343832084 +0000 UTC m=+6818.309388697" lastFinishedPulling="2025-10-03 17:21:41.829006277 +0000 UTC m=+6820.794562900" observedRunningTime="2025-10-03 17:21:42.426667732 +0000 UTC m=+6821.392224345" watchObservedRunningTime="2025-10-03 17:21:42.441772015 +0000 UTC m=+6821.407328638" Oct 03 17:21:47 crc kubenswrapper[5081]: I1003 17:21:47.681778 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:47 crc kubenswrapper[5081]: I1003 17:21:47.683537 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:47 crc kubenswrapper[5081]: I1003 17:21:47.740728 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:48 crc kubenswrapper[5081]: I1003 17:21:48.523811 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:48 crc kubenswrapper[5081]: I1003 17:21:48.574031 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:50 crc kubenswrapper[5081]: I1003 17:21:50.485804 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mjt6v" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="registry-server" containerID="cri-o://d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8" gracePeriod=2 Oct 03 17:21:50 crc kubenswrapper[5081]: I1003 17:21:50.996886 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.105283 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzz5j\" (UniqueName: \"kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j\") pod \"b6364e11-1158-4877-b0d1-06d056ba5242\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.106628 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities\") pod \"b6364e11-1158-4877-b0d1-06d056ba5242\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.106769 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content\") pod \"b6364e11-1158-4877-b0d1-06d056ba5242\" (UID: \"b6364e11-1158-4877-b0d1-06d056ba5242\") " Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.107446 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities" (OuterVolumeSpecName: "utilities") pod "b6364e11-1158-4877-b0d1-06d056ba5242" (UID: "b6364e11-1158-4877-b0d1-06d056ba5242"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.117305 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j" (OuterVolumeSpecName: "kube-api-access-xzz5j") pod "b6364e11-1158-4877-b0d1-06d056ba5242" (UID: "b6364e11-1158-4877-b0d1-06d056ba5242"). InnerVolumeSpecName "kube-api-access-xzz5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.122896 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6364e11-1158-4877-b0d1-06d056ba5242" (UID: "b6364e11-1158-4877-b0d1-06d056ba5242"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.208956 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.208989 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6364e11-1158-4877-b0d1-06d056ba5242-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.209001 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzz5j\" (UniqueName: \"kubernetes.io/projected/b6364e11-1158-4877-b0d1-06d056ba5242-kube-api-access-xzz5j\") on node \"crc\" DevicePath \"\"" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.513397 5081 generic.go:334] "Generic (PLEG): container finished" podID="b6364e11-1158-4877-b0d1-06d056ba5242" containerID="d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8" exitCode=0 Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.513455 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerDied","Data":"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8"} Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.513496 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjt6v" event={"ID":"b6364e11-1158-4877-b0d1-06d056ba5242","Type":"ContainerDied","Data":"cc05b7d93d804836bd5302fd74fec9730b229466ad40a38c80ce4c5ef3588497"} Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.513516 5081 scope.go:117] "RemoveContainer" containerID="d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.513530 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjt6v" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.543595 5081 scope.go:117] "RemoveContainer" containerID="a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.564022 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.572199 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjt6v"] Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.572470 5081 scope.go:117] "RemoveContainer" containerID="5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.619980 5081 scope.go:117] "RemoveContainer" containerID="d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8" Oct 03 17:21:51 crc kubenswrapper[5081]: E1003 17:21:51.620593 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8\": container with ID starting with d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8 not found: ID does not exist" containerID="d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.620631 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8"} err="failed to get container status \"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8\": rpc error: code = NotFound desc = could not find container \"d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8\": container with ID starting with d04042d01556515d117cfe1de8ffe6ca7fd2b23aee961a996a9cf5d2f2f841c8 not found: ID does not exist" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.620652 5081 scope.go:117] "RemoveContainer" containerID="a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008" Oct 03 17:21:51 crc kubenswrapper[5081]: E1003 17:21:51.621056 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008\": container with ID starting with a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008 not found: ID does not exist" containerID="a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.621083 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008"} err="failed to get container status \"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008\": rpc error: code = NotFound desc = could not find container \"a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008\": container with ID starting with a9a53fdd3c0f2832d1e5108e1941b4d6240b208dd366f67fefa81328aaa35008 not found: ID does not exist" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.621097 5081 scope.go:117] "RemoveContainer" containerID="5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4" Oct 03 17:21:51 crc kubenswrapper[5081]: E1003 17:21:51.621318 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4\": container with ID starting with 5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4 not found: ID does not exist" containerID="5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.621339 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4"} err="failed to get container status \"5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4\": rpc error: code = NotFound desc = could not find container \"5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4\": container with ID starting with 5a11c45539b800b3ebd6f0ad6d87a5fd02387876279def663b02d56a859670c4 not found: ID does not exist" Oct 03 17:21:51 crc kubenswrapper[5081]: I1003 17:21:51.840970 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" path="/var/lib/kubelet/pods/b6364e11-1158-4877-b0d1-06d056ba5242/volumes" Oct 03 17:21:52 crc kubenswrapper[5081]: I1003 17:21:52.030492 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-s9zm6"] Oct 03 17:21:52 crc kubenswrapper[5081]: I1003 17:21:52.042304 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-s9zm6"] Oct 03 17:21:53 crc kubenswrapper[5081]: I1003 17:21:53.841247 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1773a7b7-861b-4115-b891-31f17d7b0281" path="/var/lib/kubelet/pods/1773a7b7-861b-4115-b891-31f17d7b0281/volumes" Oct 03 17:22:00 crc kubenswrapper[5081]: I1003 17:22:00.647971 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:22:00 crc kubenswrapper[5081]: I1003 17:22:00.648548 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:22:27 crc kubenswrapper[5081]: I1003 17:22:27.207341 5081 scope.go:117] "RemoveContainer" containerID="e3c20c1fc41fbd5c2894ff3e9d7d716afb06768bb159497319e596b23b726c1d" Oct 03 17:22:27 crc kubenswrapper[5081]: I1003 17:22:27.236941 5081 scope.go:117] "RemoveContainer" containerID="2421d228f7c1b26a3eaff56cb1c8c82f1827dafed6d8900b0e6c0342e74059bb" Oct 03 17:22:27 crc kubenswrapper[5081]: I1003 17:22:27.300278 5081 scope.go:117] "RemoveContainer" containerID="7fa3b69be2cfce7efa81b95cbc72806892929cf7ddb0e75a5961d90b46be2afe" Oct 03 17:22:27 crc kubenswrapper[5081]: I1003 17:22:27.354349 5081 scope.go:117] "RemoveContainer" containerID="5e0eeb4ed5c1d005c6221cf5d3cbc239c2b676d769d794dfd0b6717c3d768167" Oct 03 17:22:27 crc kubenswrapper[5081]: I1003 17:22:27.391037 5081 scope.go:117] "RemoveContainer" containerID="7730edc6c2532e5e8c2c8c95e3cdaf27d0786ce665748fe56d8d6fd5dd0a2377" Oct 03 17:22:30 crc kubenswrapper[5081]: I1003 17:22:30.647944 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:22:30 crc kubenswrapper[5081]: I1003 17:22:30.648439 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:23:00 crc kubenswrapper[5081]: I1003 17:23:00.648071 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:23:00 crc kubenswrapper[5081]: I1003 17:23:00.648622 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:23:00 crc kubenswrapper[5081]: I1003 17:23:00.648666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:23:00 crc kubenswrapper[5081]: I1003 17:23:00.649435 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:23:00 crc kubenswrapper[5081]: I1003 17:23:00.649494 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04" gracePeriod=600 Oct 03 17:23:01 crc kubenswrapper[5081]: I1003 17:23:01.154096 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04" exitCode=0 Oct 03 17:23:01 crc kubenswrapper[5081]: I1003 17:23:01.154166 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04"} Oct 03 17:23:01 crc kubenswrapper[5081]: I1003 17:23:01.154371 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2"} Oct 03 17:23:01 crc kubenswrapper[5081]: I1003 17:23:01.154391 5081 scope.go:117] "RemoveContainer" containerID="dfee367e4fc0b7b10a620616fed0dce76353d13b05b09737e7d78fed80e1da5f" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.112200 5081 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:13 crc kubenswrapper[5081]: E1003 17:23:13.113396 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="extract-utilities" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.113416 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="extract-utilities" Oct 03 17:23:13 crc kubenswrapper[5081]: E1003 17:23:13.113440 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="registry-server" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.113449 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="registry-server" Oct 03 17:23:13 crc kubenswrapper[5081]: E1003 17:23:13.113517 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="extract-content" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.113525 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="extract-content" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.114396 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6364e11-1158-4877-b0d1-06d056ba5242" containerName="registry-server" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.116351 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.121641 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.265043 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.265196 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85gpx\" (UniqueName: \"kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.265259 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.367373 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.367498 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.367657 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85gpx\" (UniqueName: \"kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.368240 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.368271 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.389011 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85gpx\" (UniqueName: \"kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx\") pod \"redhat-operators-h7q2c\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.454509 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:13 crc kubenswrapper[5081]: I1003 17:23:13.968108 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:14 crc kubenswrapper[5081]: I1003 17:23:14.279026 5081 generic.go:334] "Generic (PLEG): container finished" podID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerID="192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4" exitCode=0 Oct 03 17:23:14 crc kubenswrapper[5081]: I1003 17:23:14.279080 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerDied","Data":"192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4"} Oct 03 17:23:14 crc kubenswrapper[5081]: I1003 17:23:14.280031 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerStarted","Data":"04603dec5a185c2499b3c8f364917a447c73db63c6b430255ccfdf4cb5a41304"} Oct 03 17:23:16 crc kubenswrapper[5081]: I1003 17:23:16.305688 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerStarted","Data":"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261"} Oct 03 17:23:19 crc kubenswrapper[5081]: I1003 17:23:19.334328 5081 generic.go:334] "Generic (PLEG): container finished" podID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerID="b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261" exitCode=0 Oct 03 17:23:19 crc kubenswrapper[5081]: I1003 17:23:19.334419 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerDied","Data":"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261"} Oct 03 17:23:20 crc kubenswrapper[5081]: I1003 17:23:20.348713 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerStarted","Data":"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654"} Oct 03 17:23:20 crc kubenswrapper[5081]: I1003 17:23:20.374797 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h7q2c" podStartSLOduration=1.905800613 podStartE2EDuration="7.374757673s" podCreationTimestamp="2025-10-03 17:23:13 +0000 UTC" firstStartedPulling="2025-10-03 17:23:14.280808034 +0000 UTC m=+6913.246364647" lastFinishedPulling="2025-10-03 17:23:19.749765104 +0000 UTC m=+6918.715321707" observedRunningTime="2025-10-03 17:23:20.370599024 +0000 UTC m=+6919.336155627" watchObservedRunningTime="2025-10-03 17:23:20.374757673 +0000 UTC m=+6919.340314306" Oct 03 17:23:23 crc kubenswrapper[5081]: I1003 17:23:23.455644 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:23 crc kubenswrapper[5081]: I1003 17:23:23.456369 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:24 crc kubenswrapper[5081]: I1003 17:23:24.505494 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h7q2c" 
podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="registry-server" probeResult="failure" output=< Oct 03 17:23:24 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s Oct 03 17:23:24 crc kubenswrapper[5081]: > Oct 03 17:23:33 crc kubenswrapper[5081]: I1003 17:23:33.504428 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:33 crc kubenswrapper[5081]: I1003 17:23:33.557692 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:33 crc kubenswrapper[5081]: I1003 17:23:33.739631 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:35 crc kubenswrapper[5081]: I1003 17:23:35.497607 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h7q2c" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="registry-server" containerID="cri-o://531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654" gracePeriod=2 Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.147860 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.200474 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities\") pod \"6199779f-33e9-4eba-bfe7-8b3e3940c824\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.200582 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85gpx\" (UniqueName: \"kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx\") pod \"6199779f-33e9-4eba-bfe7-8b3e3940c824\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.200696 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content\") pod \"6199779f-33e9-4eba-bfe7-8b3e3940c824\" (UID: \"6199779f-33e9-4eba-bfe7-8b3e3940c824\") " Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.205754 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities" (OuterVolumeSpecName: "utilities") pod "6199779f-33e9-4eba-bfe7-8b3e3940c824" (UID: "6199779f-33e9-4eba-bfe7-8b3e3940c824"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.230689 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx" (OuterVolumeSpecName: "kube-api-access-85gpx") pod "6199779f-33e9-4eba-bfe7-8b3e3940c824" (UID: "6199779f-33e9-4eba-bfe7-8b3e3940c824"). InnerVolumeSpecName "kube-api-access-85gpx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.304142 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.304172 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85gpx\" (UniqueName: \"kubernetes.io/projected/6199779f-33e9-4eba-bfe7-8b3e3940c824-kube-api-access-85gpx\") on node \"crc\" DevicePath \"\"" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.346543 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6199779f-33e9-4eba-bfe7-8b3e3940c824" (UID: "6199779f-33e9-4eba-bfe7-8b3e3940c824"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.407059 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6199779f-33e9-4eba-bfe7-8b3e3940c824-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.523368 5081 generic.go:334] "Generic (PLEG): container finished" podID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerID="531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654" exitCode=0 Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.523463 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7q2c" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.523478 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerDied","Data":"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654"} Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.524890 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7q2c" event={"ID":"6199779f-33e9-4eba-bfe7-8b3e3940c824","Type":"ContainerDied","Data":"04603dec5a185c2499b3c8f364917a447c73db63c6b430255ccfdf4cb5a41304"} Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.524930 5081 scope.go:117] "RemoveContainer" containerID="531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.550485 5081 scope.go:117] "RemoveContainer" containerID="b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.566623 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.577807 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h7q2c"] Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.599552 5081 scope.go:117] "RemoveContainer" containerID="192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.641001 5081 scope.go:117] "RemoveContainer" containerID="531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654" Oct 03 17:23:36 crc kubenswrapper[5081]: E1003 17:23:36.641618 5081 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654\": container with ID starting with 531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654 not found: ID does not exist" containerID="531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.641695 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654"} err="failed to get container status \"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654\": rpc error: code = NotFound desc = could not find container \"531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654\": container with ID starting with 531e5bd207c1ad7aa82e00e0fd179c1ad11e4e3f6c79943c15734dce7839f654 not found: ID does not exist" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.641736 5081 scope.go:117] "RemoveContainer" containerID="b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261" Oct 03 17:23:36 crc kubenswrapper[5081]: E1003 17:23:36.642232 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261\": container with ID starting with b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261 not found: ID does not exist" containerID="b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.642270 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261"} err="failed to get container status \"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261\": rpc error: code = NotFound desc = could not find container \"b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261\": container with ID starting with b944482bdeb89e110218707de36c4d44cb2ea3daa93a8c97d0645ac5aa030261 not found: ID does not exist" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.642288 5081 scope.go:117] "RemoveContainer" containerID="192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4" Oct 03 17:23:36 crc kubenswrapper[5081]: E1003 17:23:36.642766 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4\": container with ID starting with 192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4 not found: ID does not exist" containerID="192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4" Oct 03 17:23:36 crc kubenswrapper[5081]: I1003 17:23:36.642830 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4"} err="failed to get container status \"192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4\": rpc error: code = NotFound desc = could not find container \"192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4\": container with ID starting with 192f741aa60989d30fbe94eeb23d2c2256749622786382f3f6b12661bbb870c4 not found: ID does not exist" Oct 03 17:23:37 crc kubenswrapper[5081]: I1003 17:23:37.839198 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" path="/var/lib/kubelet/pods/6199779f-33e9-4eba-bfe7-8b3e3940c824/volumes" Oct 03 17:23:59 crc kubenswrapper[5081]: I1003 17:23:59.038188 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-mhtmp"] Oct 03 17:23:59 crc kubenswrapper[5081]: I1003 17:23:59.049109 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-mhtmp"] Oct 03 17:23:59 crc kubenswrapper[5081]: I1003 17:23:59.841201 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc9afd6e-cad6-434a-ab56-1335ac3964e4" path="/var/lib/kubelet/pods/dc9afd6e-cad6-434a-ab56-1335ac3964e4/volumes" Oct 03 17:24:09 crc kubenswrapper[5081]: I1003 17:24:09.031533 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-5655-account-create-cxlsl"] Oct 03 17:24:09 crc kubenswrapper[5081]: I1003 17:24:09.040851 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-5655-account-create-cxlsl"] Oct 03 17:24:09 crc kubenswrapper[5081]: I1003 17:24:09.839902 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eb9e7af-18ff-45e4-a6ce-12ebc838807b" path="/var/lib/kubelet/pods/6eb9e7af-18ff-45e4-a6ce-12ebc838807b/volumes" Oct 03 17:24:21 crc kubenswrapper[5081]: I1003 17:24:21.027473 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-kdl8p"] Oct 03 17:24:21 crc kubenswrapper[5081]: I1003 17:24:21.036268 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-kdl8p"] Oct 03 17:24:21 crc kubenswrapper[5081]: I1003 17:24:21.842136 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6259e68-df51-461f-b4bf-43269250f8aa" path="/var/lib/kubelet/pods/a6259e68-df51-461f-b4bf-43269250f8aa/volumes" Oct 03 17:24:27 crc kubenswrapper[5081]: I1003 17:24:27.539728 5081 scope.go:117] "RemoveContainer" containerID="30d125323fb8a2991ac92c59b5225a4c1d82aa58b5b1f738e1633b92bc60faf3" Oct 03 17:24:27 crc kubenswrapper[5081]: I1003 17:24:27.576179 5081 scope.go:117] "RemoveContainer" containerID="4e135bf1c50a68855dea7373ad768bfb53d71f35c3795280490c6c398241013d" Oct 03 17:24:27 crc kubenswrapper[5081]: I1003 17:24:27.637205 5081 scope.go:117] "RemoveContainer" containerID="568803f51cef577818e0553d05beec547239bcc4bb431e1807b1f7a1cab33c15" Oct 03 17:24:42 crc kubenswrapper[5081]: I1003 17:24:42.039152 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-5wkcf"] Oct 03 17:24:42 crc kubenswrapper[5081]: I1003 17:24:42.051601 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-5wkcf"] Oct 03 17:24:43 crc kubenswrapper[5081]: I1003 17:24:43.841426 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f2385d1-a278-41cc-ba7c-f61641330a6d" path="/var/lib/kubelet/pods/2f2385d1-a278-41cc-ba7c-f61641330a6d/volumes" Oct 03 17:24:52 crc kubenswrapper[5081]: I1003 17:24:52.039075 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-3530-account-create-brtxv"] Oct 03 17:24:52 crc kubenswrapper[5081]: I1003 17:24:52.048905 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-3530-account-create-brtxv"] Oct 03 17:24:53 crc kubenswrapper[5081]: I1003 17:24:53.846631 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc291b0-2241-49ae-b987-c456e046aa57" path="/var/lib/kubelet/pods/4dc291b0-2241-49ae-b987-c456e046aa57/volumes" Oct 03 17:25:00 crc 
kubenswrapper[5081]: I1003 17:25:00.647792 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:25:00 crc kubenswrapper[5081]: I1003 17:25:00.648283 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:25:04 crc kubenswrapper[5081]: I1003 17:25:04.045981 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-5qg9r"] Oct 03 17:25:04 crc kubenswrapper[5081]: I1003 17:25:04.053635 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-5qg9r"] Oct 03 17:25:05 crc kubenswrapper[5081]: I1003 17:25:05.846475 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="613389b9-58e0-4c4e-b065-2b379fff72ed" path="/var/lib/kubelet/pods/613389b9-58e0-4c4e-b065-2b379fff72ed/volumes" Oct 03 17:25:27 crc kubenswrapper[5081]: I1003 17:25:27.772071 5081 scope.go:117] "RemoveContainer" containerID="a4a1f8225d015010c8c489aea5fb798e939911e5ac2ccf7ee492545c86fe5451" Oct 03 17:25:27 crc kubenswrapper[5081]: I1003 17:25:27.812943 5081 scope.go:117] "RemoveContainer" containerID="b39aef380e29183744b55cfde8038231a362c26e3a487eb7c456d16d14e11558" Oct 03 17:25:27 crc kubenswrapper[5081]: I1003 17:25:27.879491 5081 scope.go:117] "RemoveContainer" containerID="f17232ee9f9a4cb28e6b1150f3edfeea2be86cba633912db7460115a86b4be76" Oct 03 17:25:30 crc kubenswrapper[5081]: I1003 17:25:30.647528 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:25:30 crc kubenswrapper[5081]: I1003 17:25:30.648101 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.647092 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.648517 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.648760 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:26:00 crc 
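Note: the machine-config-daemon liveness probe is a plain HTTP GET against 127.0.0.1:8798/health; "connection refused" means nothing is listening on that port at all. The probes above arrive 30 seconds apart, consistent with a failure threshold of 3 before the restart at 17:26:00. An equivalent check (the 1s timeout is the kubelet default, assumed here; any 2xx/3xx status counts as success):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func live(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused" while the daemon is down
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unhealthy status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := live("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }
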
kubenswrapper[5081]: I1003 17:26:00.649542 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.649640 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" gracePeriod=600 Oct 03 17:26:00 crc kubenswrapper[5081]: E1003 17:26:00.777160 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.875078 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" exitCode=0 Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.875137 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2"} Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.875176 5081 scope.go:117] "RemoveContainer" containerID="154bc795d4a1a5cb564cc35ce4ea861ece0a07ed2452d026738a195bd65f2f04" Oct 03 17:26:00 crc kubenswrapper[5081]: I1003 17:26:00.875920 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:26:00 crc kubenswrapper[5081]: E1003 17:26:00.876225 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:26:14 crc kubenswrapper[5081]: I1003 17:26:14.828317 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:26:14 crc kubenswrapper[5081]: E1003 17:26:14.829178 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:26:28 crc kubenswrapper[5081]: I1003 17:26:28.827377 5081 scope.go:117] "RemoveContainer" 
containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:26:28 crc kubenswrapper[5081]: E1003 17:26:28.828205 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:26:41 crc kubenswrapper[5081]: I1003 17:26:41.834856 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:26:41 crc kubenswrapper[5081]: E1003 17:26:41.835652 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:26:55 crc kubenswrapper[5081]: I1003 17:26:55.828348 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:26:55 crc kubenswrapper[5081]: E1003 17:26:55.829181 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:27:06 crc kubenswrapper[5081]: I1003 17:27:06.828751 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:27:06 crc kubenswrapper[5081]: E1003 17:27:06.829658 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:27:18 crc kubenswrapper[5081]: I1003 17:27:18.828100 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:27:18 crc kubenswrapper[5081]: E1003 17:27:18.829277 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.652751 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:28 crc kubenswrapper[5081]: E1003 17:27:28.653625 5081 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="extract-utilities" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.653635 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="extract-utilities" Oct 03 17:27:28 crc kubenswrapper[5081]: E1003 17:27:28.653674 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="extract-content" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.653680 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="extract-content" Oct 03 17:27:28 crc kubenswrapper[5081]: E1003 17:27:28.653699 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="registry-server" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.653705 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="registry-server" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.653930 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="6199779f-33e9-4eba-bfe7-8b3e3940c824" containerName="registry-server" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.655809 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.676805 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.742233 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dghlh\" (UniqueName: \"kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.742366 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.742469 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.843612 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.843732 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dghlh\" (UniqueName: 
\"kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.843823 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.844038 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.844090 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.863656 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dghlh\" (UniqueName: \"kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh\") pod \"community-operators-dqw55\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:28 crc kubenswrapper[5081]: I1003 17:27:28.993516 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:29 crc kubenswrapper[5081]: I1003 17:27:29.514000 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:29 crc kubenswrapper[5081]: I1003 17:27:29.688665 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerStarted","Data":"6920321ff5ebad91cc15684c7730319343ff67c3b247f31a26fbd64d403c7a5f"} Oct 03 17:27:30 crc kubenswrapper[5081]: I1003 17:27:30.700928 5081 generic.go:334] "Generic (PLEG): container finished" podID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerID="a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897" exitCode=0 Oct 03 17:27:30 crc kubenswrapper[5081]: I1003 17:27:30.701040 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerDied","Data":"a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897"} Oct 03 17:27:30 crc kubenswrapper[5081]: I1003 17:27:30.703773 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:27:30 crc kubenswrapper[5081]: I1003 17:27:30.827979 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:27:30 crc kubenswrapper[5081]: E1003 17:27:30.828668 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:27:31 crc kubenswrapper[5081]: I1003 17:27:31.724570 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerStarted","Data":"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818"} Oct 03 17:27:32 crc kubenswrapper[5081]: I1003 17:27:32.735689 5081 generic.go:334] "Generic (PLEG): container finished" podID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerID="b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818" exitCode=0 Oct 03 17:27:32 crc kubenswrapper[5081]: I1003 17:27:32.735729 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerDied","Data":"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818"} Oct 03 17:27:33 crc kubenswrapper[5081]: I1003 17:27:33.747003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerStarted","Data":"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5"} Oct 03 17:27:33 crc kubenswrapper[5081]: I1003 17:27:33.766588 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dqw55" podStartSLOduration=3.323592375 podStartE2EDuration="5.766546856s" podCreationTimestamp="2025-10-03 17:27:28 +0000 UTC" 
firstStartedPulling="2025-10-03 17:27:30.703464235 +0000 UTC m=+7169.669020848" lastFinishedPulling="2025-10-03 17:27:33.146418716 +0000 UTC m=+7172.111975329" observedRunningTime="2025-10-03 17:27:33.762602443 +0000 UTC m=+7172.728159066" watchObservedRunningTime="2025-10-03 17:27:33.766546856 +0000 UTC m=+7172.732103469" Oct 03 17:27:38 crc kubenswrapper[5081]: I1003 17:27:38.993753 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:38 crc kubenswrapper[5081]: I1003 17:27:38.994655 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:39 crc kubenswrapper[5081]: I1003 17:27:39.042665 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:39 crc kubenswrapper[5081]: I1003 17:27:39.843970 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:39 crc kubenswrapper[5081]: I1003 17:27:39.891757 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:41 crc kubenswrapper[5081]: I1003 17:27:41.817103 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dqw55" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="registry-server" containerID="cri-o://0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5" gracePeriod=2 Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.317337 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.420936 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities\") pod \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.421074 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dghlh\" (UniqueName: \"kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh\") pod \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.421161 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content\") pod \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\" (UID: \"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb\") " Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.421941 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities" (OuterVolumeSpecName: "utilities") pod "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" (UID: "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.426384 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh" (OuterVolumeSpecName: "kube-api-access-dghlh") pod "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" (UID: "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb"). InnerVolumeSpecName "kube-api-access-dghlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.497069 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" (UID: "1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.524308 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dghlh\" (UniqueName: \"kubernetes.io/projected/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-kube-api-access-dghlh\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.524380 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.524391 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.828277 5081 generic.go:334] "Generic (PLEG): container finished" podID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerID="0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5" exitCode=0 Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.828318 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqw55" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.828342 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerDied","Data":"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5"} Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.828543 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqw55" event={"ID":"1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb","Type":"ContainerDied","Data":"6920321ff5ebad91cc15684c7730319343ff67c3b247f31a26fbd64d403c7a5f"} Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.828583 5081 scope.go:117] "RemoveContainer" containerID="0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.848901 5081 scope.go:117] "RemoveContainer" containerID="b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.865145 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.874828 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dqw55"] Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.891713 5081 scope.go:117] "RemoveContainer" containerID="a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.926603 5081 scope.go:117] "RemoveContainer" containerID="0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5" Oct 03 17:27:42 crc kubenswrapper[5081]: E1003 17:27:42.927210 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5\": container with ID starting with 0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5 not found: ID does not exist" containerID="0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.927253 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5"} err="failed to get container status \"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5\": rpc error: code = NotFound desc = could not find container \"0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5\": container with ID starting with 0e242c910aadea2b79386cb21765b07020cb7a0466c60ab43b5d94d034cc06f5 not found: ID does not exist" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.927281 5081 scope.go:117] "RemoveContainer" containerID="b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818" Oct 03 17:27:42 crc kubenswrapper[5081]: E1003 17:27:42.927684 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818\": container with ID starting with b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818 not found: ID does not exist" containerID="b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.927714 5081 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818"} err="failed to get container status \"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818\": rpc error: code = NotFound desc = could not find container \"b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818\": container with ID starting with b8f187bd5be9cb7044610017ed28af848ee3c66946877b0219a802fc7a62d818 not found: ID does not exist" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.927735 5081 scope.go:117] "RemoveContainer" containerID="a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897" Oct 03 17:27:42 crc kubenswrapper[5081]: E1003 17:27:42.928143 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897\": container with ID starting with a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897 not found: ID does not exist" containerID="a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897" Oct 03 17:27:42 crc kubenswrapper[5081]: I1003 17:27:42.928170 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897"} err="failed to get container status \"a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897\": rpc error: code = NotFound desc = could not find container \"a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897\": container with ID starting with a45e30231c4f7b124c971cd77a4a49e237edcf06793b3cc42cfa61b579fbd897 not found: ID does not exist" Oct 03 17:27:43 crc kubenswrapper[5081]: I1003 17:27:43.840550 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" path="/var/lib/kubelet/pods/1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb/volumes" Oct 03 17:27:45 crc kubenswrapper[5081]: I1003 17:27:45.829036 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:27:45 crc kubenswrapper[5081]: E1003 17:27:45.830662 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:27:55 crc kubenswrapper[5081]: I1003 17:27:55.955364 5081 generic.go:334] "Generic (PLEG): container finished" podID="bd0f854a-61ae-40aa-b100-ae4918c3dcea" containerID="cbf77889376ccc42f698c738fa7341b2143547c4c37a4ac24899e5942e2cf115" exitCode=0 Oct 03 17:27:55 crc kubenswrapper[5081]: I1003 17:27:55.955466 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" event={"ID":"bd0f854a-61ae-40aa-b100-ae4918c3dcea","Type":"ContainerDied","Data":"cbf77889376ccc42f698c738fa7341b2143547c4c37a4ac24899e5942e2cf115"} Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.400673 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.542041 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle\") pod \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.542269 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory\") pod \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.542314 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8z8t\" (UniqueName: \"kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t\") pod \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.542390 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph\") pod \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.542440 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key\") pod \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\" (UID: \"bd0f854a-61ae-40aa-b100-ae4918c3dcea\") " Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.548094 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t" (OuterVolumeSpecName: "kube-api-access-b8z8t") pod "bd0f854a-61ae-40aa-b100-ae4918c3dcea" (UID: "bd0f854a-61ae-40aa-b100-ae4918c3dcea"). InnerVolumeSpecName "kube-api-access-b8z8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.548171 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph" (OuterVolumeSpecName: "ceph") pod "bd0f854a-61ae-40aa-b100-ae4918c3dcea" (UID: "bd0f854a-61ae-40aa-b100-ae4918c3dcea"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.548382 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "bd0f854a-61ae-40aa-b100-ae4918c3dcea" (UID: "bd0f854a-61ae-40aa-b100-ae4918c3dcea"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.570797 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bd0f854a-61ae-40aa-b100-ae4918c3dcea" (UID: "bd0f854a-61ae-40aa-b100-ae4918c3dcea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.571897 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory" (OuterVolumeSpecName: "inventory") pod "bd0f854a-61ae-40aa-b100-ae4918c3dcea" (UID: "bd0f854a-61ae-40aa-b100-ae4918c3dcea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.644518 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.644570 5081 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.644586 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.644599 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8z8t\" (UniqueName: \"kubernetes.io/projected/bd0f854a-61ae-40aa-b100-ae4918c3dcea-kube-api-access-b8z8t\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.644611 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bd0f854a-61ae-40aa-b100-ae4918c3dcea-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.977613 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" event={"ID":"bd0f854a-61ae-40aa-b100-ae4918c3dcea","Type":"ContainerDied","Data":"7046e8700af0bfef962dad6ff6e0a8aa347f1f17006503ab59090f8307a9831f"} Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.978132 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7046e8700af0bfef962dad6ff6e0a8aa347f1f17006503ab59090f8307a9831f" Oct 03 17:27:57 crc kubenswrapper[5081]: I1003 17:27:57.977701 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9" Oct 03 17:27:59 crc kubenswrapper[5081]: I1003 17:27:59.828111 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:27:59 crc kubenswrapper[5081]: E1003 17:27:59.828745 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.183939 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xjbtb"] Oct 03 17:28:06 crc kubenswrapper[5081]: E1003 17:28:06.186490 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd0f854a-61ae-40aa-b100-ae4918c3dcea" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.186587 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd0f854a-61ae-40aa-b100-ae4918c3dcea" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 03 17:28:06 crc kubenswrapper[5081]: E1003 17:28:06.186716 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="extract-content" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.186776 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="extract-content" Oct 03 17:28:06 crc kubenswrapper[5081]: E1003 17:28:06.186851 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="registry-server" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.186921 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="registry-server" Oct 03 17:28:06 crc kubenswrapper[5081]: E1003 17:28:06.186990 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="extract-utilities" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.187061 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="extract-utilities" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.194907 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1384dcca-9d22-4f1a-8c1c-1e88ae8d3dcb" containerName="registry-server" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.195022 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd0f854a-61ae-40aa-b100-ae4918c3dcea" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.196861 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.198529 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xjbtb"] Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.199233 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.201841 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.202100 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.202270 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.331152 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.331296 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.331599 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.332134 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqsfk\" (UniqueName: \"kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.332189 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.434679 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqsfk\" (UniqueName: \"kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 
17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.434734 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.434768 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.434852 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.434901 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.441781 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.442176 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.442240 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.452518 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqsfk\" (UniqueName: \"kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.466049 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory\") pod \"bootstrap-openstack-openstack-cell1-xjbtb\" (UID: 
\"fa4df372-397d-46cd-81e5-a8dae67295ad\") " pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:06 crc kubenswrapper[5081]: I1003 17:28:06.525662 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:28:07 crc kubenswrapper[5081]: I1003 17:28:07.026232 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xjbtb"] Oct 03 17:28:07 crc kubenswrapper[5081]: I1003 17:28:07.057011 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" event={"ID":"fa4df372-397d-46cd-81e5-a8dae67295ad","Type":"ContainerStarted","Data":"ba52eb5a85626245cf3adccec282b992f62acc1ff18ccdcecdb429bb0cced23f"} Oct 03 17:28:08 crc kubenswrapper[5081]: I1003 17:28:08.066274 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" event={"ID":"fa4df372-397d-46cd-81e5-a8dae67295ad","Type":"ContainerStarted","Data":"5c80686461bdb5b046885630ecf15c8eb1064201d2fcf74e9f1f25c68ea28607"} Oct 03 17:28:08 crc kubenswrapper[5081]: I1003 17:28:08.085100 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" podStartSLOduration=1.43053213 podStartE2EDuration="2.085079877s" podCreationTimestamp="2025-10-03 17:28:06 +0000 UTC" firstStartedPulling="2025-10-03 17:28:07.031652372 +0000 UTC m=+7205.997208985" lastFinishedPulling="2025-10-03 17:28:07.686200119 +0000 UTC m=+7206.651756732" observedRunningTime="2025-10-03 17:28:08.08307677 +0000 UTC m=+7207.048633393" watchObservedRunningTime="2025-10-03 17:28:08.085079877 +0000 UTC m=+7207.050636490" Oct 03 17:28:11 crc kubenswrapper[5081]: I1003 17:28:11.837314 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:28:11 crc kubenswrapper[5081]: E1003 17:28:11.838472 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:28:26 crc kubenswrapper[5081]: I1003 17:28:26.828085 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:28:26 crc kubenswrapper[5081]: E1003 17:28:26.828863 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.507727 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.511551 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.531072 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.654838 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.655367 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.655430 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6jxt\" (UniqueName: \"kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.757947 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.758077 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.758133 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6jxt\" (UniqueName: \"kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.758718 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.758799 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.777523 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w6jxt\" (UniqueName: \"kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt\") pod \"certified-operators-mrwln\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.827915 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:28:40 crc kubenswrapper[5081]: E1003 17:28:40.828199 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:28:40 crc kubenswrapper[5081]: I1003 17:28:40.847344 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:41 crc kubenswrapper[5081]: I1003 17:28:41.335373 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:41 crc kubenswrapper[5081]: I1003 17:28:41.375211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerStarted","Data":"282d7f0279cbb9a274b85e1004b4c9a3fce8e10be0643a6fa372e00c1bafad3a"} Oct 03 17:28:42 crc kubenswrapper[5081]: I1003 17:28:42.394479 5081 generic.go:334] "Generic (PLEG): container finished" podID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerID="885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4" exitCode=0 Oct 03 17:28:42 crc kubenswrapper[5081]: I1003 17:28:42.394838 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerDied","Data":"885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4"} Oct 03 17:28:44 crc kubenswrapper[5081]: I1003 17:28:44.417287 5081 generic.go:334] "Generic (PLEG): container finished" podID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerID="39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04" exitCode=0 Oct 03 17:28:44 crc kubenswrapper[5081]: I1003 17:28:44.417330 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerDied","Data":"39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04"} Oct 03 17:28:45 crc kubenswrapper[5081]: I1003 17:28:45.430118 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerStarted","Data":"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7"} Oct 03 17:28:45 crc kubenswrapper[5081]: I1003 17:28:45.452418 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mrwln" podStartSLOduration=3.021572173 podStartE2EDuration="5.452396318s" podCreationTimestamp="2025-10-03 17:28:40 +0000 UTC" firstStartedPulling="2025-10-03 17:28:42.397603211 +0000 UTC m=+7241.363159824" 
lastFinishedPulling="2025-10-03 17:28:44.828427356 +0000 UTC m=+7243.793983969" observedRunningTime="2025-10-03 17:28:45.452391358 +0000 UTC m=+7244.417947971" watchObservedRunningTime="2025-10-03 17:28:45.452396318 +0000 UTC m=+7244.417952931" Oct 03 17:28:50 crc kubenswrapper[5081]: I1003 17:28:50.847933 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:50 crc kubenswrapper[5081]: I1003 17:28:50.848486 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:50 crc kubenswrapper[5081]: I1003 17:28:50.895635 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:51 crc kubenswrapper[5081]: I1003 17:28:51.697931 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:51 crc kubenswrapper[5081]: I1003 17:28:51.754622 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:52 crc kubenswrapper[5081]: I1003 17:28:52.828853 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:28:52 crc kubenswrapper[5081]: E1003 17:28:52.829386 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:28:53 crc kubenswrapper[5081]: I1003 17:28:53.518809 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mrwln" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="registry-server" containerID="cri-o://2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7" gracePeriod=2 Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.027346 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.067930 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities\") pod \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.068186 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6jxt\" (UniqueName: \"kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt\") pod \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.068366 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content\") pod \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\" (UID: \"5da5c234-155f-4d2c-96ae-6e7cf3fc7657\") " Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.069445 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities" (OuterVolumeSpecName: "utilities") pod "5da5c234-155f-4d2c-96ae-6e7cf3fc7657" (UID: "5da5c234-155f-4d2c-96ae-6e7cf3fc7657"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.075419 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt" (OuterVolumeSpecName: "kube-api-access-w6jxt") pod "5da5c234-155f-4d2c-96ae-6e7cf3fc7657" (UID: "5da5c234-155f-4d2c-96ae-6e7cf3fc7657"). InnerVolumeSpecName "kube-api-access-w6jxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.170583 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6jxt\" (UniqueName: \"kubernetes.io/projected/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-kube-api-access-w6jxt\") on node \"crc\" DevicePath \"\"" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.170621 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.188757 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5da5c234-155f-4d2c-96ae-6e7cf3fc7657" (UID: "5da5c234-155f-4d2c-96ae-6e7cf3fc7657"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.272803 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5da5c234-155f-4d2c-96ae-6e7cf3fc7657-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.528895 5081 generic.go:334] "Generic (PLEG): container finished" podID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerID="2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7" exitCode=0 Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.528941 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mrwln" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.528944 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerDied","Data":"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7"} Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.529102 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mrwln" event={"ID":"5da5c234-155f-4d2c-96ae-6e7cf3fc7657","Type":"ContainerDied","Data":"282d7f0279cbb9a274b85e1004b4c9a3fce8e10be0643a6fa372e00c1bafad3a"} Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.529132 5081 scope.go:117] "RemoveContainer" containerID="2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.551252 5081 scope.go:117] "RemoveContainer" containerID="39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.564455 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.573120 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mrwln"] Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.600209 5081 scope.go:117] "RemoveContainer" containerID="885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.630206 5081 scope.go:117] "RemoveContainer" containerID="2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7" Oct 03 17:28:54 crc kubenswrapper[5081]: E1003 17:28:54.630747 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7\": container with ID starting with 2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7 not found: ID does not exist" containerID="2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.630784 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7"} err="failed to get container status \"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7\": rpc error: code = NotFound desc = could not find container \"2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7\": container with ID starting with 2de99db4f4de571153111c3a95d2d20db2d75baab27e22edd96e9738518df9d7 not found: ID does not exist" Oct 03 
17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.630809 5081 scope.go:117] "RemoveContainer" containerID="39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04" Oct 03 17:28:54 crc kubenswrapper[5081]: E1003 17:28:54.631268 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04\": container with ID starting with 39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04 not found: ID does not exist" containerID="39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.631301 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04"} err="failed to get container status \"39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04\": rpc error: code = NotFound desc = could not find container \"39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04\": container with ID starting with 39dc79a5e7c07963b9e88735e00f06256dcefb0c3c3df45f808f8c1aa0deee04 not found: ID does not exist" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.631321 5081 scope.go:117] "RemoveContainer" containerID="885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4" Oct 03 17:28:54 crc kubenswrapper[5081]: E1003 17:28:54.631609 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4\": container with ID starting with 885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4 not found: ID does not exist" containerID="885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4" Oct 03 17:28:54 crc kubenswrapper[5081]: I1003 17:28:54.631664 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4"} err="failed to get container status \"885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4\": rpc error: code = NotFound desc = could not find container \"885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4\": container with ID starting with 885d634033452bf3b4ed7ce139ca7ec288d262904fa2d1033f520360154031d4 not found: ID does not exist" Oct 03 17:28:55 crc kubenswrapper[5081]: I1003 17:28:55.840720 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" path="/var/lib/kubelet/pods/5da5c234-155f-4d2c-96ae-6e7cf3fc7657/volumes" Oct 03 17:29:07 crc kubenswrapper[5081]: I1003 17:29:07.828022 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:29:07 crc kubenswrapper[5081]: E1003 17:29:07.828839 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:29:19 crc kubenswrapper[5081]: I1003 17:29:19.827687 5081 scope.go:117] "RemoveContainer" 
containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:29:19 crc kubenswrapper[5081]: E1003 17:29:19.828546 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:29:34 crc kubenswrapper[5081]: I1003 17:29:34.828123 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:29:34 crc kubenswrapper[5081]: E1003 17:29:34.829843 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:29:46 crc kubenswrapper[5081]: I1003 17:29:46.827656 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:29:46 crc kubenswrapper[5081]: E1003 17:29:46.828491 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:29:59 crc kubenswrapper[5081]: I1003 17:29:59.828008 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:29:59 crc kubenswrapper[5081]: E1003 17:29:59.828777 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.171495 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r"] Oct 03 17:30:00 crc kubenswrapper[5081]: E1003 17:30:00.172100 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="extract-utilities" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.172117 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="extract-utilities" Oct 03 17:30:00 crc kubenswrapper[5081]: E1003 17:30:00.172147 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="registry-server" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.172154 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" 
containerName="registry-server" Oct 03 17:30:00 crc kubenswrapper[5081]: E1003 17:30:00.172170 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="extract-content" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.172177 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="extract-content" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.172402 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="5da5c234-155f-4d2c-96ae-6e7cf3fc7657" containerName="registry-server" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.173315 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.176545 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.176877 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.187718 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r"] Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.277102 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzsm6\" (UniqueName: \"kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.277540 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.277695 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.379769 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzsm6\" (UniqueName: \"kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.379839 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.379867 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.380744 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.398363 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.404405 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzsm6\" (UniqueName: \"kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6\") pod \"collect-profiles-29325210-xhc7r\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.514750 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:00 crc kubenswrapper[5081]: I1003 17:30:00.952433 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r"] Oct 03 17:30:01 crc kubenswrapper[5081]: I1003 17:30:01.137253 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" event={"ID":"f73c6db5-52fb-49c4-9cdb-2405e5f48572","Type":"ContainerStarted","Data":"7be7424434f93a479457a1572354b79b32e08e626dd7e6e015affd448fedc096"} Oct 03 17:30:01 crc kubenswrapper[5081]: I1003 17:30:01.137515 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" event={"ID":"f73c6db5-52fb-49c4-9cdb-2405e5f48572","Type":"ContainerStarted","Data":"8fe46324b7d2aef34a02262eb99c45b14707f0070e20a59c62432bc77ae23e3d"} Oct 03 17:30:01 crc kubenswrapper[5081]: I1003 17:30:01.157606 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" podStartSLOduration=1.157588456 podStartE2EDuration="1.157588456s" podCreationTimestamp="2025-10-03 17:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 17:30:01.149440153 +0000 UTC m=+7320.114996776" watchObservedRunningTime="2025-10-03 17:30:01.157588456 +0000 UTC m=+7320.123145059" Oct 03 17:30:01 crc kubenswrapper[5081]: E1003 17:30:01.483880 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf73c6db5_52fb_49c4_9cdb_2405e5f48572.slice/crio-7be7424434f93a479457a1572354b79b32e08e626dd7e6e015affd448fedc096.scope\": RecentStats: unable to find data in memory cache]" Oct 03 17:30:02 crc kubenswrapper[5081]: I1003 17:30:02.149290 5081 generic.go:334] "Generic (PLEG): container finished" podID="f73c6db5-52fb-49c4-9cdb-2405e5f48572" containerID="7be7424434f93a479457a1572354b79b32e08e626dd7e6e015affd448fedc096" exitCode=0 Oct 03 17:30:02 crc kubenswrapper[5081]: I1003 17:30:02.149399 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" event={"ID":"f73c6db5-52fb-49c4-9cdb-2405e5f48572","Type":"ContainerDied","Data":"7be7424434f93a479457a1572354b79b32e08e626dd7e6e015affd448fedc096"} Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.529389 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.650071 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume\") pod \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.650542 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume\") pod \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.650663 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzsm6\" (UniqueName: \"kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6\") pod \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\" (UID: \"f73c6db5-52fb-49c4-9cdb-2405e5f48572\") " Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.651042 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume" (OuterVolumeSpecName: "config-volume") pod "f73c6db5-52fb-49c4-9cdb-2405e5f48572" (UID: "f73c6db5-52fb-49c4-9cdb-2405e5f48572"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.651949 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f73c6db5-52fb-49c4-9cdb-2405e5f48572-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.656500 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6" (OuterVolumeSpecName: "kube-api-access-qzsm6") pod "f73c6db5-52fb-49c4-9cdb-2405e5f48572" (UID: "f73c6db5-52fb-49c4-9cdb-2405e5f48572"). InnerVolumeSpecName "kube-api-access-qzsm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.656847 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f73c6db5-52fb-49c4-9cdb-2405e5f48572" (UID: "f73c6db5-52fb-49c4-9cdb-2405e5f48572"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.753786 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f73c6db5-52fb-49c4-9cdb-2405e5f48572-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:30:03 crc kubenswrapper[5081]: I1003 17:30:03.753819 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzsm6\" (UniqueName: \"kubernetes.io/projected/f73c6db5-52fb-49c4-9cdb-2405e5f48572-kube-api-access-qzsm6\") on node \"crc\" DevicePath \"\"" Oct 03 17:30:04 crc kubenswrapper[5081]: I1003 17:30:04.176340 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" event={"ID":"f73c6db5-52fb-49c4-9cdb-2405e5f48572","Type":"ContainerDied","Data":"8fe46324b7d2aef34a02262eb99c45b14707f0070e20a59c62432bc77ae23e3d"} Oct 03 17:30:04 crc kubenswrapper[5081]: I1003 17:30:04.176382 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fe46324b7d2aef34a02262eb99c45b14707f0070e20a59c62432bc77ae23e3d" Oct 03 17:30:04 crc kubenswrapper[5081]: I1003 17:30:04.176417 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r" Oct 03 17:30:04 crc kubenswrapper[5081]: I1003 17:30:04.219587 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"] Oct 03 17:30:04 crc kubenswrapper[5081]: I1003 17:30:04.228095 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325165-j74hl"] Oct 03 17:30:05 crc kubenswrapper[5081]: I1003 17:30:05.843396 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf86ba3-04ed-4956-9a95-90f23ef359a6" path="/var/lib/kubelet/pods/8cf86ba3-04ed-4956-9a95-90f23ef359a6/volumes" Oct 03 17:30:11 crc kubenswrapper[5081]: I1003 17:30:11.834822 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:30:11 crc kubenswrapper[5081]: E1003 17:30:11.835650 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:30:24 crc kubenswrapper[5081]: I1003 17:30:24.828659 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:30:24 crc kubenswrapper[5081]: E1003 17:30:24.829435 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:30:28 crc kubenswrapper[5081]: I1003 17:30:28.143420 5081 scope.go:117] "RemoveContainer" containerID="8ab05bba877dc43f626c8a2d601d7e71a37eef5979b4a36a712c3dcca43ce6a0" Oct 03 17:30:37 
crc kubenswrapper[5081]: I1003 17:30:37.828830 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:30:37 crc kubenswrapper[5081]: E1003 17:30:37.829792 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:30:49 crc kubenswrapper[5081]: I1003 17:30:49.828002 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:30:49 crc kubenswrapper[5081]: E1003 17:30:49.828808 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:31:01 crc kubenswrapper[5081]: I1003 17:31:01.835881 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:31:02 crc kubenswrapper[5081]: I1003 17:31:02.720445 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766"} Oct 03 17:31:11 crc kubenswrapper[5081]: I1003 17:31:11.811050 5081 generic.go:334] "Generic (PLEG): container finished" podID="fa4df372-397d-46cd-81e5-a8dae67295ad" containerID="5c80686461bdb5b046885630ecf15c8eb1064201d2fcf74e9f1f25c68ea28607" exitCode=0 Oct 03 17:31:11 crc kubenswrapper[5081]: I1003 17:31:11.811287 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" event={"ID":"fa4df372-397d-46cd-81e5-a8dae67295ad","Type":"ContainerDied","Data":"5c80686461bdb5b046885630ecf15c8eb1064201d2fcf74e9f1f25c68ea28607"} Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.327931 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.398229 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqsfk\" (UniqueName: \"kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk\") pod \"fa4df372-397d-46cd-81e5-a8dae67295ad\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.398339 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key\") pod \"fa4df372-397d-46cd-81e5-a8dae67295ad\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.398462 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory\") pod \"fa4df372-397d-46cd-81e5-a8dae67295ad\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.398548 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle\") pod \"fa4df372-397d-46cd-81e5-a8dae67295ad\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.398726 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph\") pod \"fa4df372-397d-46cd-81e5-a8dae67295ad\" (UID: \"fa4df372-397d-46cd-81e5-a8dae67295ad\") " Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.417174 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "fa4df372-397d-46cd-81e5-a8dae67295ad" (UID: "fa4df372-397d-46cd-81e5-a8dae67295ad"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.417675 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk" (OuterVolumeSpecName: "kube-api-access-vqsfk") pod "fa4df372-397d-46cd-81e5-a8dae67295ad" (UID: "fa4df372-397d-46cd-81e5-a8dae67295ad"). InnerVolumeSpecName "kube-api-access-vqsfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.421160 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph" (OuterVolumeSpecName: "ceph") pod "fa4df372-397d-46cd-81e5-a8dae67295ad" (UID: "fa4df372-397d-46cd-81e5-a8dae67295ad"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.429928 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory" (OuterVolumeSpecName: "inventory") pod "fa4df372-397d-46cd-81e5-a8dae67295ad" (UID: "fa4df372-397d-46cd-81e5-a8dae67295ad"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.462912 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fa4df372-397d-46cd-81e5-a8dae67295ad" (UID: "fa4df372-397d-46cd-81e5-a8dae67295ad"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.501392 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.501437 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqsfk\" (UniqueName: \"kubernetes.io/projected/fa4df372-397d-46cd-81e5-a8dae67295ad-kube-api-access-vqsfk\") on node \"crc\" DevicePath \"\"" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.501448 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.501459 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.501468 5081 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa4df372-397d-46cd-81e5-a8dae67295ad-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.829761 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.840429 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xjbtb" event={"ID":"fa4df372-397d-46cd-81e5-a8dae67295ad","Type":"ContainerDied","Data":"ba52eb5a85626245cf3adccec282b992f62acc1ff18ccdcecdb429bb0cced23f"} Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.840472 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba52eb5a85626245cf3adccec282b992f62acc1ff18ccdcecdb429bb0cced23f" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.921880 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-tckpk"] Oct 03 17:31:13 crc kubenswrapper[5081]: E1003 17:31:13.922294 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73c6db5-52fb-49c4-9cdb-2405e5f48572" containerName="collect-profiles" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.922310 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73c6db5-52fb-49c4-9cdb-2405e5f48572" containerName="collect-profiles" Oct 03 17:31:13 crc kubenswrapper[5081]: E1003 17:31:13.922347 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa4df372-397d-46cd-81e5-a8dae67295ad" containerName="bootstrap-openstack-openstack-cell1" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.922355 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa4df372-397d-46cd-81e5-a8dae67295ad" containerName="bootstrap-openstack-openstack-cell1" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.922549 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa4df372-397d-46cd-81e5-a8dae67295ad" containerName="bootstrap-openstack-openstack-cell1" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.922601 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73c6db5-52fb-49c4-9cdb-2405e5f48572" containerName="collect-profiles" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.924671 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.930214 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.930382 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.932091 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.933393 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:31:13 crc kubenswrapper[5081]: I1003 17:31:13.964607 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-tckpk"] Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.011689 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.012121 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.012167 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxtz7\" (UniqueName: \"kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.012337 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.114659 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.114885 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc 
kubenswrapper[5081]: I1003 17:31:14.114909 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxtz7\" (UniqueName: \"kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.114966 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.119868 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.120474 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.121094 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.131816 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxtz7\" (UniqueName: \"kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7\") pod \"download-cache-openstack-openstack-cell1-tckpk\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.250949 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.757899 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-tckpk"] Oct 03 17:31:14 crc kubenswrapper[5081]: I1003 17:31:14.845191 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" event={"ID":"b07c8283-4077-478a-a8ff-18433231fb38","Type":"ContainerStarted","Data":"7d81eb8a029e86cf1e7b980e27586cbaa5a2f7138508bea78218292688c13ae8"} Oct 03 17:31:15 crc kubenswrapper[5081]: I1003 17:31:15.855813 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" event={"ID":"b07c8283-4077-478a-a8ff-18433231fb38","Type":"ContainerStarted","Data":"23086d753e3d944c9f7893d33c01610bac4b459fb277a7c66a3414b0356397d2"} Oct 03 17:31:15 crc kubenswrapper[5081]: I1003 17:31:15.881244 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" podStartSLOduration=2.412991899 podStartE2EDuration="2.881225703s" podCreationTimestamp="2025-10-03 17:31:13 +0000 UTC" firstStartedPulling="2025-10-03 17:31:14.763775064 +0000 UTC m=+7393.729331677" lastFinishedPulling="2025-10-03 17:31:15.232008838 +0000 UTC m=+7394.197565481" observedRunningTime="2025-10-03 17:31:15.873630475 +0000 UTC m=+7394.839187118" watchObservedRunningTime="2025-10-03 17:31:15.881225703 +0000 UTC m=+7394.846782316" Oct 03 17:31:59 crc kubenswrapper[5081]: I1003 17:31:59.896478 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:31:59 crc kubenswrapper[5081]: I1003 17:31:59.899475 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:31:59 crc kubenswrapper[5081]: I1003 17:31:59.923526 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.007422 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.007527 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rxtl\" (UniqueName: \"kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.007690 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.109745 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.109809 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rxtl\" (UniqueName: \"kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.109862 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.110407 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.110716 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.133485 5081 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9rxtl\" (UniqueName: \"kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl\") pod \"redhat-marketplace-c6h6s\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.240249 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:00 crc kubenswrapper[5081]: I1003 17:32:00.752466 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:32:00 crc kubenswrapper[5081]: W1003 17:32:00.759509 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde1c819a_21c6_41f5_a498_d338e63ce62f.slice/crio-727fa93abbaf03dda8f26f14c0d0c67dc8c03a02a1877e868767fbc8c4d8d11e WatchSource:0}: Error finding container 727fa93abbaf03dda8f26f14c0d0c67dc8c03a02a1877e868767fbc8c4d8d11e: Status 404 returned error can't find the container with id 727fa93abbaf03dda8f26f14c0d0c67dc8c03a02a1877e868767fbc8c4d8d11e Oct 03 17:32:01 crc kubenswrapper[5081]: I1003 17:32:01.348792 5081 generic.go:334] "Generic (PLEG): container finished" podID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerID="62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4" exitCode=0 Oct 03 17:32:01 crc kubenswrapper[5081]: I1003 17:32:01.348877 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerDied","Data":"62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4"} Oct 03 17:32:01 crc kubenswrapper[5081]: I1003 17:32:01.349275 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerStarted","Data":"727fa93abbaf03dda8f26f14c0d0c67dc8c03a02a1877e868767fbc8c4d8d11e"} Oct 03 17:32:02 crc kubenswrapper[5081]: I1003 17:32:02.372485 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerStarted","Data":"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765"} Oct 03 17:32:03 crc kubenswrapper[5081]: I1003 17:32:03.387473 5081 generic.go:334] "Generic (PLEG): container finished" podID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerID="8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765" exitCode=0 Oct 03 17:32:03 crc kubenswrapper[5081]: I1003 17:32:03.387588 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerDied","Data":"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765"} Oct 03 17:32:04 crc kubenswrapper[5081]: I1003 17:32:04.407076 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerStarted","Data":"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa"} Oct 03 17:32:04 crc kubenswrapper[5081]: I1003 17:32:04.431809 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c6h6s" podStartSLOduration=2.638300079 
podStartE2EDuration="5.431791407s" podCreationTimestamp="2025-10-03 17:31:59 +0000 UTC" firstStartedPulling="2025-10-03 17:32:01.350973694 +0000 UTC m=+7440.316530307" lastFinishedPulling="2025-10-03 17:32:04.144465022 +0000 UTC m=+7443.110021635" observedRunningTime="2025-10-03 17:32:04.427658889 +0000 UTC m=+7443.393215512" watchObservedRunningTime="2025-10-03 17:32:04.431791407 +0000 UTC m=+7443.397348020" Oct 03 17:32:10 crc kubenswrapper[5081]: I1003 17:32:10.240723 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:10 crc kubenswrapper[5081]: I1003 17:32:10.241329 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:10 crc kubenswrapper[5081]: I1003 17:32:10.320386 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:10 crc kubenswrapper[5081]: I1003 17:32:10.536932 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:10 crc kubenswrapper[5081]: I1003 17:32:10.604361 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:32:12 crc kubenswrapper[5081]: I1003 17:32:12.506448 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c6h6s" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="registry-server" containerID="cri-o://485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa" gracePeriod=2 Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.096889 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.171477 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rxtl\" (UniqueName: \"kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl\") pod \"de1c819a-21c6-41f5-a498-d338e63ce62f\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.171632 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content\") pod \"de1c819a-21c6-41f5-a498-d338e63ce62f\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.171725 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities\") pod \"de1c819a-21c6-41f5-a498-d338e63ce62f\" (UID: \"de1c819a-21c6-41f5-a498-d338e63ce62f\") " Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.173010 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities" (OuterVolumeSpecName: "utilities") pod "de1c819a-21c6-41f5-a498-d338e63ce62f" (UID: "de1c819a-21c6-41f5-a498-d338e63ce62f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.181949 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl" (OuterVolumeSpecName: "kube-api-access-9rxtl") pod "de1c819a-21c6-41f5-a498-d338e63ce62f" (UID: "de1c819a-21c6-41f5-a498-d338e63ce62f"). InnerVolumeSpecName "kube-api-access-9rxtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.184760 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de1c819a-21c6-41f5-a498-d338e63ce62f" (UID: "de1c819a-21c6-41f5-a498-d338e63ce62f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.275165 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rxtl\" (UniqueName: \"kubernetes.io/projected/de1c819a-21c6-41f5-a498-d338e63ce62f-kube-api-access-9rxtl\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.275207 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.275217 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de1c819a-21c6-41f5-a498-d338e63ce62f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.518712 5081 generic.go:334] "Generic (PLEG): container finished" podID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerID="485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa" exitCode=0 Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.518819 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerDied","Data":"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa"} Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.518868 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6h6s" event={"ID":"de1c819a-21c6-41f5-a498-d338e63ce62f","Type":"ContainerDied","Data":"727fa93abbaf03dda8f26f14c0d0c67dc8c03a02a1877e868767fbc8c4d8d11e"} Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.518887 5081 scope.go:117] "RemoveContainer" containerID="485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.518832 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6h6s" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.559805 5081 scope.go:117] "RemoveContainer" containerID="8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.566779 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.583201 5081 scope.go:117] "RemoveContainer" containerID="62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.583897 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6h6s"] Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.640207 5081 scope.go:117] "RemoveContainer" containerID="485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa" Oct 03 17:32:13 crc kubenswrapper[5081]: E1003 17:32:13.640798 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa\": container with ID starting with 485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa not found: ID does not exist" containerID="485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.640893 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa"} err="failed to get container status \"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa\": rpc error: code = NotFound desc = could not find container \"485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa\": container with ID starting with 485ca0abe450394146bf2946a051edfd9a5b6a50d82e037c481ea700e46c4aaa not found: ID does not exist" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.640947 5081 scope.go:117] "RemoveContainer" containerID="8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765" Oct 03 17:32:13 crc kubenswrapper[5081]: E1003 17:32:13.641477 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765\": container with ID starting with 8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765 not found: ID does not exist" containerID="8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.641545 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765"} err="failed to get container status \"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765\": rpc error: code = NotFound desc = could not find container \"8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765\": container with ID starting with 8fe7dca765ed244ceb1197568d9a1034d045ba1ea7b3f07af06f4f24701d2765 not found: ID does not exist" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.641629 5081 scope.go:117] "RemoveContainer" containerID="62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4" Oct 03 17:32:13 crc kubenswrapper[5081]: E1003 17:32:13.642099 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4\": container with ID starting with 62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4 not found: ID does not exist" containerID="62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.642145 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4"} err="failed to get container status \"62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4\": rpc error: code = NotFound desc = could not find container \"62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4\": container with ID starting with 62f98f7b39a42acd6a9c59e8aa64f21946e02331da88d6c051968a73ae4cbcd4 not found: ID does not exist" Oct 03 17:32:13 crc kubenswrapper[5081]: I1003 17:32:13.849722 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" path="/var/lib/kubelet/pods/de1c819a-21c6-41f5-a498-d338e63ce62f/volumes" Oct 03 17:32:46 crc kubenswrapper[5081]: I1003 17:32:46.950526 5081 generic.go:334] "Generic (PLEG): container finished" podID="b07c8283-4077-478a-a8ff-18433231fb38" containerID="23086d753e3d944c9f7893d33c01610bac4b459fb277a7c66a3414b0356397d2" exitCode=0 Oct 03 17:32:46 crc kubenswrapper[5081]: I1003 17:32:46.950603 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" event={"ID":"b07c8283-4077-478a-a8ff-18433231fb38","Type":"ContainerDied","Data":"23086d753e3d944c9f7893d33c01610bac4b459fb277a7c66a3414b0356397d2"} Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.426189 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.528934 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxtz7\" (UniqueName: \"kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7\") pod \"b07c8283-4077-478a-a8ff-18433231fb38\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.529110 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key\") pod \"b07c8283-4077-478a-a8ff-18433231fb38\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.529234 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph\") pod \"b07c8283-4077-478a-a8ff-18433231fb38\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.529347 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory\") pod \"b07c8283-4077-478a-a8ff-18433231fb38\" (UID: \"b07c8283-4077-478a-a8ff-18433231fb38\") " Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.537828 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7" (OuterVolumeSpecName: "kube-api-access-pxtz7") pod "b07c8283-4077-478a-a8ff-18433231fb38" (UID: "b07c8283-4077-478a-a8ff-18433231fb38"). InnerVolumeSpecName "kube-api-access-pxtz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.538758 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph" (OuterVolumeSpecName: "ceph") pod "b07c8283-4077-478a-a8ff-18433231fb38" (UID: "b07c8283-4077-478a-a8ff-18433231fb38"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.557420 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b07c8283-4077-478a-a8ff-18433231fb38" (UID: "b07c8283-4077-478a-a8ff-18433231fb38"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.571770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory" (OuterVolumeSpecName: "inventory") pod "b07c8283-4077-478a-a8ff-18433231fb38" (UID: "b07c8283-4077-478a-a8ff-18433231fb38"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.634302 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.634347 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.634362 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxtz7\" (UniqueName: \"kubernetes.io/projected/b07c8283-4077-478a-a8ff-18433231fb38-kube-api-access-pxtz7\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.634376 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b07c8283-4077-478a-a8ff-18433231fb38-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.976928 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" event={"ID":"b07c8283-4077-478a-a8ff-18433231fb38","Type":"ContainerDied","Data":"7d81eb8a029e86cf1e7b980e27586cbaa5a2f7138508bea78218292688c13ae8"} Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.976987 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d81eb8a029e86cf1e7b980e27586cbaa5a2f7138508bea78218292688c13ae8" Oct 03 17:32:48 crc kubenswrapper[5081]: I1003 17:32:48.977265 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-tckpk" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.069331 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-tdk8r"] Oct 03 17:32:49 crc kubenswrapper[5081]: E1003 17:32:49.069975 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07c8283-4077-478a-a8ff-18433231fb38" containerName="download-cache-openstack-openstack-cell1" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.069998 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07c8283-4077-478a-a8ff-18433231fb38" containerName="download-cache-openstack-openstack-cell1" Oct 03 17:32:49 crc kubenswrapper[5081]: E1003 17:32:49.070050 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="extract-content" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.070059 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="extract-content" Oct 03 17:32:49 crc kubenswrapper[5081]: E1003 17:32:49.070078 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="extract-utilities" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.070087 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="extract-utilities" Oct 03 17:32:49 crc kubenswrapper[5081]: E1003 17:32:49.070101 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="registry-server" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.070109 5081 
state_mem.go:107] "Deleted CPUSet assignment" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="registry-server" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.070378 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="de1c819a-21c6-41f5-a498-d338e63ce62f" containerName="registry-server" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.070430 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07c8283-4077-478a-a8ff-18433231fb38" containerName="download-cache-openstack-openstack-cell1" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.071479 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.074062 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.077718 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.078040 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.078214 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.096692 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-tdk8r"] Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.250414 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7l8f\" (UniqueName: \"kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.250512 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.250593 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.250652 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.352060 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-v7l8f\" (UniqueName: \"kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.352201 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.352249 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.352329 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.358652 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.359214 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.360510 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.375173 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7l8f\" (UniqueName: \"kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f\") pod \"configure-network-openstack-openstack-cell1-tdk8r\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:49 crc kubenswrapper[5081]: I1003 17:32:49.389517 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:32:50 crc kubenswrapper[5081]: I1003 17:32:50.004143 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-tdk8r"] Oct 03 17:32:50 crc kubenswrapper[5081]: W1003 17:32:50.009834 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7552988a_62cb_429c_b959_44546d45ba71.slice/crio-c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d WatchSource:0}: Error finding container c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d: Status 404 returned error can't find the container with id c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d Oct 03 17:32:50 crc kubenswrapper[5081]: I1003 17:32:50.012054 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:32:50 crc kubenswrapper[5081]: I1003 17:32:50.994293 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" event={"ID":"7552988a-62cb-429c-b959-44546d45ba71","Type":"ContainerStarted","Data":"c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d"} Oct 03 17:32:52 crc kubenswrapper[5081]: I1003 17:32:52.006851 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" event={"ID":"7552988a-62cb-429c-b959-44546d45ba71","Type":"ContainerStarted","Data":"a8b38b235f692e70b83d5fd44fb903d15051b62ba82f6bf7681f348d173443a4"} Oct 03 17:32:52 crc kubenswrapper[5081]: I1003 17:32:52.035839 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" podStartSLOduration=1.515390993 podStartE2EDuration="3.035817337s" podCreationTimestamp="2025-10-03 17:32:49 +0000 UTC" firstStartedPulling="2025-10-03 17:32:50.011764626 +0000 UTC m=+7488.977321239" lastFinishedPulling="2025-10-03 17:32:51.53219097 +0000 UTC m=+7490.497747583" observedRunningTime="2025-10-03 17:32:52.027775577 +0000 UTC m=+7490.993332280" watchObservedRunningTime="2025-10-03 17:32:52.035817337 +0000 UTC m=+7491.001373980" Oct 03 17:33:30 crc kubenswrapper[5081]: I1003 17:33:30.647525 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:33:30 crc kubenswrapper[5081]: I1003 17:33:30.648089 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.494173 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.497264 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.508910 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.574223 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.574388 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpwqw\" (UniqueName: \"kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.574919 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.676623 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.676727 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpwqw\" (UniqueName: \"kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.676811 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.677458 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.677586 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.707772 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dpwqw\" (UniqueName: \"kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw\") pod \"redhat-operators-7n5hv\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:33:59 crc kubenswrapper[5081]: I1003 17:33:59.834206 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.322320 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.647336 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.647395 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.783845 5081 generic.go:334] "Generic (PLEG): container finished" podID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerID="8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15" exitCode=0 Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.783917 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerDied","Data":"8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15"} Oct 03 17:34:00 crc kubenswrapper[5081]: I1003 17:34:00.784190 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerStarted","Data":"b634a35e4adb6c05aae5db8f8676d29d32cf416e4fc15cea8ee3accf11354889"} Oct 03 17:34:02 crc kubenswrapper[5081]: I1003 17:34:02.804850 5081 generic.go:334] "Generic (PLEG): container finished" podID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerID="ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c" exitCode=0 Oct 03 17:34:02 crc kubenswrapper[5081]: I1003 17:34:02.804903 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerDied","Data":"ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c"} Oct 03 17:34:03 crc kubenswrapper[5081]: I1003 17:34:03.822332 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerStarted","Data":"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268"} Oct 03 17:34:08 crc kubenswrapper[5081]: E1003 17:34:08.483959 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7552988a_62cb_429c_b959_44546d45ba71.slice/crio-conmon-a8b38b235f692e70b83d5fd44fb903d15051b62ba82f6bf7681f348d173443a4.scope\": RecentStats: unable to find 
data in memory cache]" Oct 03 17:34:08 crc kubenswrapper[5081]: I1003 17:34:08.889780 5081 generic.go:334] "Generic (PLEG): container finished" podID="7552988a-62cb-429c-b959-44546d45ba71" containerID="a8b38b235f692e70b83d5fd44fb903d15051b62ba82f6bf7681f348d173443a4" exitCode=0 Oct 03 17:34:08 crc kubenswrapper[5081]: I1003 17:34:08.889826 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" event={"ID":"7552988a-62cb-429c-b959-44546d45ba71","Type":"ContainerDied","Data":"a8b38b235f692e70b83d5fd44fb903d15051b62ba82f6bf7681f348d173443a4"} Oct 03 17:34:08 crc kubenswrapper[5081]: I1003 17:34:08.917663 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7n5hv" podStartSLOduration=7.309633709 podStartE2EDuration="9.917633936s" podCreationTimestamp="2025-10-03 17:33:59 +0000 UTC" firstStartedPulling="2025-10-03 17:34:00.786775691 +0000 UTC m=+7559.752332324" lastFinishedPulling="2025-10-03 17:34:03.394775928 +0000 UTC m=+7562.360332551" observedRunningTime="2025-10-03 17:34:03.846646523 +0000 UTC m=+7562.812203206" watchObservedRunningTime="2025-10-03 17:34:08.917633936 +0000 UTC m=+7567.883190579" Oct 03 17:34:09 crc kubenswrapper[5081]: I1003 17:34:09.839930 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:09 crc kubenswrapper[5081]: I1003 17:34:09.840332 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.382052 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.485528 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7l8f\" (UniqueName: \"kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f\") pod \"7552988a-62cb-429c-b959-44546d45ba71\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.485646 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key\") pod \"7552988a-62cb-429c-b959-44546d45ba71\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.485710 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory\") pod \"7552988a-62cb-429c-b959-44546d45ba71\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.485746 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph\") pod \"7552988a-62cb-429c-b959-44546d45ba71\" (UID: \"7552988a-62cb-429c-b959-44546d45ba71\") " Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.491873 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f" (OuterVolumeSpecName: "kube-api-access-v7l8f") pod "7552988a-62cb-429c-b959-44546d45ba71" (UID: 
"7552988a-62cb-429c-b959-44546d45ba71"). InnerVolumeSpecName "kube-api-access-v7l8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.495043 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph" (OuterVolumeSpecName: "ceph") pod "7552988a-62cb-429c-b959-44546d45ba71" (UID: "7552988a-62cb-429c-b959-44546d45ba71"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.514279 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7552988a-62cb-429c-b959-44546d45ba71" (UID: "7552988a-62cb-429c-b959-44546d45ba71"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.518273 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory" (OuterVolumeSpecName: "inventory") pod "7552988a-62cb-429c-b959-44546d45ba71" (UID: "7552988a-62cb-429c-b959-44546d45ba71"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.587923 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7l8f\" (UniqueName: \"kubernetes.io/projected/7552988a-62cb-429c-b959-44546d45ba71-kube-api-access-v7l8f\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.587954 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.587964 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.587972 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7552988a-62cb-429c-b959-44546d45ba71-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.885921 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7n5hv" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="registry-server" probeResult="failure" output=< Oct 03 17:34:10 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s Oct 03 17:34:10 crc kubenswrapper[5081]: > Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.911603 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" event={"ID":"7552988a-62cb-429c-b959-44546d45ba71","Type":"ContainerDied","Data":"c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d"} Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.911638 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7bf30872db3a9e672a1e7395a503b1c090c23fa25a82b17ca0e41d95293b05d" Oct 03 17:34:10 crc kubenswrapper[5081]: I1003 17:34:10.911687 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-tdk8r" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.018459 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-srdwj"] Oct 03 17:34:11 crc kubenswrapper[5081]: E1003 17:34:11.018971 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7552988a-62cb-429c-b959-44546d45ba71" containerName="configure-network-openstack-openstack-cell1" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.018992 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7552988a-62cb-429c-b959-44546d45ba71" containerName="configure-network-openstack-openstack-cell1" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.019305 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7552988a-62cb-429c-b959-44546d45ba71" containerName="configure-network-openstack-openstack-cell1" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.020177 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.022576 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.022667 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.026109 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.031281 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.031404 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-srdwj"] Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.096883 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.096978 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fn99\" (UniqueName: \"kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.097023 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.097186 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.199717 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.199842 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.200107 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.200200 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fn99\" (UniqueName: \"kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.208497 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.209821 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.209914 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.219840 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fn99\" (UniqueName: \"kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99\") pod \"validate-network-openstack-openstack-cell1-srdwj\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " 
pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:11 crc kubenswrapper[5081]: I1003 17:34:11.354098 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:12 crc kubenswrapper[5081]: I1003 17:34:12.549816 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-srdwj"] Oct 03 17:34:12 crc kubenswrapper[5081]: I1003 17:34:12.935074 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" event={"ID":"890f24f0-7e77-4fb6-a241-51f148fba79c","Type":"ContainerStarted","Data":"f17986623fc1cb5e00a726b92a6593b5b871555f52fa57eb71731fd6860b9328"} Oct 03 17:34:13 crc kubenswrapper[5081]: I1003 17:34:13.944275 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" event={"ID":"890f24f0-7e77-4fb6-a241-51f148fba79c","Type":"ContainerStarted","Data":"d014bff93083a8f49bc16c9bc7b1364fb1b130a9e49816449c473f2004848924"} Oct 03 17:34:13 crc kubenswrapper[5081]: I1003 17:34:13.971005 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" podStartSLOduration=3.113446324 podStartE2EDuration="3.970986563s" podCreationTimestamp="2025-10-03 17:34:10 +0000 UTC" firstStartedPulling="2025-10-03 17:34:12.547653459 +0000 UTC m=+7571.513210072" lastFinishedPulling="2025-10-03 17:34:13.405193698 +0000 UTC m=+7572.370750311" observedRunningTime="2025-10-03 17:34:13.963688314 +0000 UTC m=+7572.929244957" watchObservedRunningTime="2025-10-03 17:34:13.970986563 +0000 UTC m=+7572.936543176" Oct 03 17:34:19 crc kubenswrapper[5081]: I1003 17:34:19.001745 5081 generic.go:334] "Generic (PLEG): container finished" podID="890f24f0-7e77-4fb6-a241-51f148fba79c" containerID="d014bff93083a8f49bc16c9bc7b1364fb1b130a9e49816449c473f2004848924" exitCode=0 Oct 03 17:34:19 crc kubenswrapper[5081]: I1003 17:34:19.001857 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" event={"ID":"890f24f0-7e77-4fb6-a241-51f148fba79c","Type":"ContainerDied","Data":"d014bff93083a8f49bc16c9bc7b1364fb1b130a9e49816449c473f2004848924"} Oct 03 17:34:19 crc kubenswrapper[5081]: I1003 17:34:19.920945 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:19 crc kubenswrapper[5081]: I1003 17:34:19.999496 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.167255 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.557010 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.630363 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory\") pod \"890f24f0-7e77-4fb6-a241-51f148fba79c\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.658371 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory" (OuterVolumeSpecName: "inventory") pod "890f24f0-7e77-4fb6-a241-51f148fba79c" (UID: "890f24f0-7e77-4fb6-a241-51f148fba79c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.731734 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph\") pod \"890f24f0-7e77-4fb6-a241-51f148fba79c\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.731783 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key\") pod \"890f24f0-7e77-4fb6-a241-51f148fba79c\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.731837 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fn99\" (UniqueName: \"kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99\") pod \"890f24f0-7e77-4fb6-a241-51f148fba79c\" (UID: \"890f24f0-7e77-4fb6-a241-51f148fba79c\") " Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.732470 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.734510 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph" (OuterVolumeSpecName: "ceph") pod "890f24f0-7e77-4fb6-a241-51f148fba79c" (UID: "890f24f0-7e77-4fb6-a241-51f148fba79c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.735080 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99" (OuterVolumeSpecName: "kube-api-access-7fn99") pod "890f24f0-7e77-4fb6-a241-51f148fba79c" (UID: "890f24f0-7e77-4fb6-a241-51f148fba79c"). InnerVolumeSpecName "kube-api-access-7fn99". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.758498 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "890f24f0-7e77-4fb6-a241-51f148fba79c" (UID: "890f24f0-7e77-4fb6-a241-51f148fba79c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.834744 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.834783 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/890f24f0-7e77-4fb6-a241-51f148fba79c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:20 crc kubenswrapper[5081]: I1003 17:34:20.834797 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fn99\" (UniqueName: \"kubernetes.io/projected/890f24f0-7e77-4fb6-a241-51f148fba79c-kube-api-access-7fn99\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.025186 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" event={"ID":"890f24f0-7e77-4fb6-a241-51f148fba79c","Type":"ContainerDied","Data":"f17986623fc1cb5e00a726b92a6593b5b871555f52fa57eb71731fd6860b9328"} Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.025237 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f17986623fc1cb5e00a726b92a6593b5b871555f52fa57eb71731fd6860b9328" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.025206 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-srdwj" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.036707 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7n5hv" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="registry-server" containerID="cri-o://6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268" gracePeriod=2 Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.128760 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-5sxn6"] Oct 03 17:34:21 crc kubenswrapper[5081]: E1003 17:34:21.129264 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="890f24f0-7e77-4fb6-a241-51f148fba79c" containerName="validate-network-openstack-openstack-cell1" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.129282 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="890f24f0-7e77-4fb6-a241-51f148fba79c" containerName="validate-network-openstack-openstack-cell1" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.129536 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="890f24f0-7e77-4fb6-a241-51f148fba79c" containerName="validate-network-openstack-openstack-cell1" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.130331 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.151456 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.151739 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hndqj\" (UniqueName: \"kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.151879 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.151670 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.152159 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.152243 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-5sxn6"] Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.152173 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.152293 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.157147 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.254347 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.254754 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hndqj\" (UniqueName: \"kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 
17:34:21.254806 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.254924 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.259639 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.260244 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.262988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.271296 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hndqj\" (UniqueName: \"kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj\") pod \"install-os-openstack-openstack-cell1-5sxn6\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.430901 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.471347 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.566742 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content\") pod \"14d6dc93-073e-4514-8004-6fc3783e39b5\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.567084 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpwqw\" (UniqueName: \"kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw\") pod \"14d6dc93-073e-4514-8004-6fc3783e39b5\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.567265 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities\") pod \"14d6dc93-073e-4514-8004-6fc3783e39b5\" (UID: \"14d6dc93-073e-4514-8004-6fc3783e39b5\") " Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.568200 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities" (OuterVolumeSpecName: "utilities") pod "14d6dc93-073e-4514-8004-6fc3783e39b5" (UID: "14d6dc93-073e-4514-8004-6fc3783e39b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.571341 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.583865 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw" (OuterVolumeSpecName: "kube-api-access-dpwqw") pod "14d6dc93-073e-4514-8004-6fc3783e39b5" (UID: "14d6dc93-073e-4514-8004-6fc3783e39b5"). InnerVolumeSpecName "kube-api-access-dpwqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.648547 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14d6dc93-073e-4514-8004-6fc3783e39b5" (UID: "14d6dc93-073e-4514-8004-6fc3783e39b5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.679842 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpwqw\" (UniqueName: \"kubernetes.io/projected/14d6dc93-073e-4514-8004-6fc3783e39b5-kube-api-access-dpwqw\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.679888 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14d6dc93-073e-4514-8004-6fc3783e39b5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:34:21 crc kubenswrapper[5081]: I1003 17:34:21.998242 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-5sxn6"] Oct 03 17:34:22 crc kubenswrapper[5081]: W1003 17:34:22.001007 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16e0fc84_51ae_4419_8ba0_0268aa8b5f6e.slice/crio-5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d WatchSource:0}: Error finding container 5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d: Status 404 returned error can't find the container with id 5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.040636 5081 generic.go:334] "Generic (PLEG): container finished" podID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerID="6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268" exitCode=0 Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.040705 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7n5hv" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.040752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerDied","Data":"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268"} Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.040794 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7n5hv" event={"ID":"14d6dc93-073e-4514-8004-6fc3783e39b5","Type":"ContainerDied","Data":"b634a35e4adb6c05aae5db8f8676d29d32cf416e4fc15cea8ee3accf11354889"} Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.040816 5081 scope.go:117] "RemoveContainer" containerID="6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.049742 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" event={"ID":"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e","Type":"ContainerStarted","Data":"5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d"} Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.068005 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.073780 5081 scope.go:117] "RemoveContainer" containerID="ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.078644 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7n5hv"] Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.101615 5081 scope.go:117] "RemoveContainer" 
containerID="8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.143661 5081 scope.go:117] "RemoveContainer" containerID="6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268" Oct 03 17:34:22 crc kubenswrapper[5081]: E1003 17:34:22.144023 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268\": container with ID starting with 6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268 not found: ID does not exist" containerID="6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.144052 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268"} err="failed to get container status \"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268\": rpc error: code = NotFound desc = could not find container \"6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268\": container with ID starting with 6754602cd09a831bd2da22e7da905ca38ea93f7c074cf51b7bfb9a795a624268 not found: ID does not exist" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.144071 5081 scope.go:117] "RemoveContainer" containerID="ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c" Oct 03 17:34:22 crc kubenswrapper[5081]: E1003 17:34:22.144284 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c\": container with ID starting with ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c not found: ID does not exist" containerID="ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.144301 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c"} err="failed to get container status \"ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c\": rpc error: code = NotFound desc = could not find container \"ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c\": container with ID starting with ada6ead342662c18dba96e805df4ec3519aff56848c4e1df7510e250d871bf1c not found: ID does not exist" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.144312 5081 scope.go:117] "RemoveContainer" containerID="8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15" Oct 03 17:34:22 crc kubenswrapper[5081]: E1003 17:34:22.144946 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15\": container with ID starting with 8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15 not found: ID does not exist" containerID="8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15" Oct 03 17:34:22 crc kubenswrapper[5081]: I1003 17:34:22.144971 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15"} err="failed to get container status \"8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15\": rpc error: code = 
NotFound desc = could not find container \"8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15\": container with ID starting with 8d769656d371cc02a290fde0f25e1102f88065dba3e6e1378457b5b34f355f15 not found: ID does not exist" Oct 03 17:34:23 crc kubenswrapper[5081]: I1003 17:34:23.061825 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" event={"ID":"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e","Type":"ContainerStarted","Data":"2c33f3e0a9a886ac6adb85e4a954eed97420a19de3100d06a9c8556aacb88a8a"} Oct 03 17:34:23 crc kubenswrapper[5081]: I1003 17:34:23.078151 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" podStartSLOduration=1.5276529779999999 podStartE2EDuration="2.078135446s" podCreationTimestamp="2025-10-03 17:34:21 +0000 UTC" firstStartedPulling="2025-10-03 17:34:22.003342559 +0000 UTC m=+7580.968899172" lastFinishedPulling="2025-10-03 17:34:22.553825027 +0000 UTC m=+7581.519381640" observedRunningTime="2025-10-03 17:34:23.075097199 +0000 UTC m=+7582.040653812" watchObservedRunningTime="2025-10-03 17:34:23.078135446 +0000 UTC m=+7582.043692059" Oct 03 17:34:23 crc kubenswrapper[5081]: I1003 17:34:23.840148 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" path="/var/lib/kubelet/pods/14d6dc93-073e-4514-8004-6fc3783e39b5/volumes" Oct 03 17:34:30 crc kubenswrapper[5081]: I1003 17:34:30.647228 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:34:30 crc kubenswrapper[5081]: I1003 17:34:30.647835 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:34:30 crc kubenswrapper[5081]: I1003 17:34:30.647882 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:34:30 crc kubenswrapper[5081]: I1003 17:34:30.648902 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:34:30 crc kubenswrapper[5081]: I1003 17:34:30.648951 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766" gracePeriod=600 Oct 03 17:34:31 crc kubenswrapper[5081]: I1003 17:34:31.160014 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766" exitCode=0 Oct 03 17:34:31 crc kubenswrapper[5081]: I1003 
17:34:31.160126 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766"} Oct 03 17:34:31 crc kubenswrapper[5081]: I1003 17:34:31.160651 5081 scope.go:117] "RemoveContainer" containerID="ca2ed76dd370410e9bc6f07f9628b044c00c224b6a1593c80499605b25f932f2" Oct 03 17:34:32 crc kubenswrapper[5081]: I1003 17:34:32.173173 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75"} Oct 03 17:35:06 crc kubenswrapper[5081]: I1003 17:35:06.589583 5081 generic.go:334] "Generic (PLEG): container finished" podID="16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" containerID="2c33f3e0a9a886ac6adb85e4a954eed97420a19de3100d06a9c8556aacb88a8a" exitCode=0 Oct 03 17:35:06 crc kubenswrapper[5081]: I1003 17:35:06.589592 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" event={"ID":"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e","Type":"ContainerDied","Data":"2c33f3e0a9a886ac6adb85e4a954eed97420a19de3100d06a9c8556aacb88a8a"} Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.110367 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.191251 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory\") pod \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.191506 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph\") pod \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.191626 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key\") pod \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.191663 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hndqj\" (UniqueName: \"kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj\") pod \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\" (UID: \"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e\") " Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.203150 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj" (OuterVolumeSpecName: "kube-api-access-hndqj") pod "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" (UID: "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e"). InnerVolumeSpecName "kube-api-access-hndqj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.257652 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory" (OuterVolumeSpecName: "inventory") pod "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" (UID: "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.259724 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph" (OuterVolumeSpecName: "ceph") pod "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" (UID: "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.284665 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" (UID: "16e0fc84-51ae-4419-8ba0-0268aa8b5f6e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.295146 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.295184 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hndqj\" (UniqueName: \"kubernetes.io/projected/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-kube-api-access-hndqj\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.295195 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.295207 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/16e0fc84-51ae-4419-8ba0-0268aa8b5f6e-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.617100 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" event={"ID":"16e0fc84-51ae-4419-8ba0-0268aa8b5f6e","Type":"ContainerDied","Data":"5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d"} Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.617171 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5294366d02698a602edf139216aa20b9d682d9e263c286a7b6fadc35ed55052d" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.617267 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-5sxn6" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.706683 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-g54tj"] Oct 03 17:35:08 crc kubenswrapper[5081]: E1003 17:35:08.707119 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="extract-content" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707129 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="extract-content" Oct 03 17:35:08 crc kubenswrapper[5081]: E1003 17:35:08.707151 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="registry-server" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707157 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="registry-server" Oct 03 17:35:08 crc kubenswrapper[5081]: E1003 17:35:08.707176 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="extract-utilities" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707184 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="extract-utilities" Oct 03 17:35:08 crc kubenswrapper[5081]: E1003 17:35:08.707199 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" containerName="install-os-openstack-openstack-cell1" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707208 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" containerName="install-os-openstack-openstack-cell1" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707401 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="16e0fc84-51ae-4419-8ba0-0268aa8b5f6e" containerName="install-os-openstack-openstack-cell1" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.707421 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="14d6dc93-073e-4514-8004-6fc3783e39b5" containerName="registry-server" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.708321 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.719283 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.722045 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.722397 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.736509 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.756863 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-g54tj"] Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.807147 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.807247 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.807276 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.807316 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r2hr\" (UniqueName: \"kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.910046 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.910115 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.910142 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.910770 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r2hr\" (UniqueName: \"kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.914036 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.914279 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.914731 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:08 crc kubenswrapper[5081]: I1003 17:35:08.927278 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r2hr\" (UniqueName: \"kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr\") pod \"configure-os-openstack-openstack-cell1-g54tj\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:09 crc kubenswrapper[5081]: I1003 17:35:09.037219 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:09 crc kubenswrapper[5081]: I1003 17:35:09.664100 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-g54tj"] Oct 03 17:35:09 crc kubenswrapper[5081]: W1003 17:35:09.670568 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod389263a2_812c_4ded_bc03_549916284b76.slice/crio-2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9 WatchSource:0}: Error finding container 2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9: Status 404 returned error can't find the container with id 2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9 Oct 03 17:35:10 crc kubenswrapper[5081]: I1003 17:35:10.637682 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" event={"ID":"389263a2-812c-4ded-bc03-549916284b76","Type":"ContainerStarted","Data":"11df1850b146700189b9fd7f3f060666b1ec4526cebbc80f73e4e26a35d43301"} Oct 03 17:35:10 crc kubenswrapper[5081]: I1003 17:35:10.638267 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" event={"ID":"389263a2-812c-4ded-bc03-549916284b76","Type":"ContainerStarted","Data":"2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9"} Oct 03 17:35:10 crc kubenswrapper[5081]: I1003 17:35:10.663915 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" podStartSLOduration=2.172668733 podStartE2EDuration="2.663893865s" podCreationTimestamp="2025-10-03 17:35:08 +0000 UTC" firstStartedPulling="2025-10-03 17:35:09.675518592 +0000 UTC m=+7628.641075205" lastFinishedPulling="2025-10-03 17:35:10.166743694 +0000 UTC m=+7629.132300337" observedRunningTime="2025-10-03 17:35:10.65531587 +0000 UTC m=+7629.620872493" watchObservedRunningTime="2025-10-03 17:35:10.663893865 +0000 UTC m=+7629.629450488" Oct 03 17:35:57 crc kubenswrapper[5081]: I1003 17:35:57.143427 5081 generic.go:334] "Generic (PLEG): container finished" podID="389263a2-812c-4ded-bc03-549916284b76" containerID="11df1850b146700189b9fd7f3f060666b1ec4526cebbc80f73e4e26a35d43301" exitCode=0 Oct 03 17:35:57 crc kubenswrapper[5081]: I1003 17:35:57.143532 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" event={"ID":"389263a2-812c-4ded-bc03-549916284b76","Type":"ContainerDied","Data":"11df1850b146700189b9fd7f3f060666b1ec4526cebbc80f73e4e26a35d43301"} Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.651895 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.806869 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph\") pod \"389263a2-812c-4ded-bc03-549916284b76\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.807210 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5r2hr\" (UniqueName: \"kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr\") pod \"389263a2-812c-4ded-bc03-549916284b76\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.807238 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory\") pod \"389263a2-812c-4ded-bc03-549916284b76\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.807425 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key\") pod \"389263a2-812c-4ded-bc03-549916284b76\" (UID: \"389263a2-812c-4ded-bc03-549916284b76\") " Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.811957 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph" (OuterVolumeSpecName: "ceph") pod "389263a2-812c-4ded-bc03-549916284b76" (UID: "389263a2-812c-4ded-bc03-549916284b76"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.813209 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr" (OuterVolumeSpecName: "kube-api-access-5r2hr") pod "389263a2-812c-4ded-bc03-549916284b76" (UID: "389263a2-812c-4ded-bc03-549916284b76"). InnerVolumeSpecName "kube-api-access-5r2hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.835901 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "389263a2-812c-4ded-bc03-549916284b76" (UID: "389263a2-812c-4ded-bc03-549916284b76"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.836321 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory" (OuterVolumeSpecName: "inventory") pod "389263a2-812c-4ded-bc03-549916284b76" (UID: "389263a2-812c-4ded-bc03-549916284b76"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.909680 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.909710 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.909721 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5r2hr\" (UniqueName: \"kubernetes.io/projected/389263a2-812c-4ded-bc03-549916284b76-kube-api-access-5r2hr\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:58 crc kubenswrapper[5081]: I1003 17:35:58.909729 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/389263a2-812c-4ded-bc03-549916284b76-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.167095 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" event={"ID":"389263a2-812c-4ded-bc03-549916284b76","Type":"ContainerDied","Data":"2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9"} Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.167133 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d2dea731df23a94ac87392b5eb3b9d4dce42ef4b9b711d6302d526c2ebf40e9" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.167143 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-g54tj" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.264853 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-9gkk8"] Oct 03 17:35:59 crc kubenswrapper[5081]: E1003 17:35:59.265399 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389263a2-812c-4ded-bc03-549916284b76" containerName="configure-os-openstack-openstack-cell1" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.265427 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="389263a2-812c-4ded-bc03-549916284b76" containerName="configure-os-openstack-openstack-cell1" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.265724 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="389263a2-812c-4ded-bc03-549916284b76" containerName="configure-os-openstack-openstack-cell1" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.266747 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.269278 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.269532 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.270887 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.270907 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.276659 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-9gkk8"] Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.418926 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5r5z\" (UniqueName: \"kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.419315 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.419508 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.419605 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.522017 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5r5z\" (UniqueName: \"kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.522169 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.522352 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.522390 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.527148 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.529374 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.532278 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.546542 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5r5z\" (UniqueName: \"kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z\") pod \"ssh-known-hosts-openstack-9gkk8\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:35:59 crc kubenswrapper[5081]: I1003 17:35:59.582789 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:36:00 crc kubenswrapper[5081]: I1003 17:36:00.254230 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-9gkk8"] Oct 03 17:36:01 crc kubenswrapper[5081]: I1003 17:36:01.188424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-9gkk8" event={"ID":"c1204f6d-ed27-4b6d-bd88-838314a990be","Type":"ContainerStarted","Data":"5c5168c22a1c2a462f5a148efa3592b46fd32079698637737f0cc08612912d21"} Oct 03 17:36:01 crc kubenswrapper[5081]: I1003 17:36:01.188757 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-9gkk8" event={"ID":"c1204f6d-ed27-4b6d-bd88-838314a990be","Type":"ContainerStarted","Data":"b47c653525dd7bd7aa16715d446a4d1859af7278914f8106fe61be98fafcdc6e"} Oct 03 17:36:01 crc kubenswrapper[5081]: I1003 17:36:01.226897 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-9gkk8" podStartSLOduration=1.68238514 podStartE2EDuration="2.226860667s" podCreationTimestamp="2025-10-03 17:35:59 +0000 UTC" firstStartedPulling="2025-10-03 17:36:00.260894525 +0000 UTC m=+7679.226451158" lastFinishedPulling="2025-10-03 17:36:00.805370072 +0000 UTC m=+7679.770926685" observedRunningTime="2025-10-03 17:36:01.221098752 +0000 UTC m=+7680.186655425" watchObservedRunningTime="2025-10-03 17:36:01.226860667 +0000 UTC m=+7680.192417330" Oct 03 17:36:10 crc kubenswrapper[5081]: I1003 17:36:10.280377 5081 generic.go:334] "Generic (PLEG): container finished" podID="c1204f6d-ed27-4b6d-bd88-838314a990be" containerID="5c5168c22a1c2a462f5a148efa3592b46fd32079698637737f0cc08612912d21" exitCode=0 Oct 03 17:36:10 crc kubenswrapper[5081]: I1003 17:36:10.280585 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-9gkk8" event={"ID":"c1204f6d-ed27-4b6d-bd88-838314a990be","Type":"ContainerDied","Data":"5c5168c22a1c2a462f5a148efa3592b46fd32079698637737f0cc08612912d21"} Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.716506 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.792227 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1\") pod \"c1204f6d-ed27-4b6d-bd88-838314a990be\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.792316 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph\") pod \"c1204f6d-ed27-4b6d-bd88-838314a990be\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.792395 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5r5z\" (UniqueName: \"kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z\") pod \"c1204f6d-ed27-4b6d-bd88-838314a990be\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.792504 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0\") pod \"c1204f6d-ed27-4b6d-bd88-838314a990be\" (UID: \"c1204f6d-ed27-4b6d-bd88-838314a990be\") " Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.797940 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph" (OuterVolumeSpecName: "ceph") pod "c1204f6d-ed27-4b6d-bd88-838314a990be" (UID: "c1204f6d-ed27-4b6d-bd88-838314a990be"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.804862 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z" (OuterVolumeSpecName: "kube-api-access-m5r5z") pod "c1204f6d-ed27-4b6d-bd88-838314a990be" (UID: "c1204f6d-ed27-4b6d-bd88-838314a990be"). InnerVolumeSpecName "kube-api-access-m5r5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.825559 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "c1204f6d-ed27-4b6d-bd88-838314a990be" (UID: "c1204f6d-ed27-4b6d-bd88-838314a990be"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.835163 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "c1204f6d-ed27-4b6d-bd88-838314a990be" (UID: "c1204f6d-ed27-4b6d-bd88-838314a990be"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.895411 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.895443 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5r5z\" (UniqueName: \"kubernetes.io/projected/c1204f6d-ed27-4b6d-bd88-838314a990be-kube-api-access-m5r5z\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.895456 5081 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:11 crc kubenswrapper[5081]: I1003 17:36:11.895466 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c1204f6d-ed27-4b6d-bd88-838314a990be-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.301026 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-9gkk8" event={"ID":"c1204f6d-ed27-4b6d-bd88-838314a990be","Type":"ContainerDied","Data":"b47c653525dd7bd7aa16715d446a4d1859af7278914f8106fe61be98fafcdc6e"} Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.301352 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b47c653525dd7bd7aa16715d446a4d1859af7278914f8106fe61be98fafcdc6e" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.301054 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-9gkk8" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.375649 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4q95"] Oct 03 17:36:12 crc kubenswrapper[5081]: E1003 17:36:12.376191 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1204f6d-ed27-4b6d-bd88-838314a990be" containerName="ssh-known-hosts-openstack" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.376211 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1204f6d-ed27-4b6d-bd88-838314a990be" containerName="ssh-known-hosts-openstack" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.376509 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1204f6d-ed27-4b6d-bd88-838314a990be" containerName="ssh-known-hosts-openstack" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.377437 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.380758 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.380950 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.381082 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.392223 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.397090 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4q95"] Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.509207 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2md6h\" (UniqueName: \"kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.509351 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.509387 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.509440 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.610746 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2md6h\" (UniqueName: \"kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.610897 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.610940 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.611023 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.615684 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.619156 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.629309 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2md6h\" (UniqueName: \"kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.629700 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4q95\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:12 crc kubenswrapper[5081]: I1003 17:36:12.708806 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:13 crc kubenswrapper[5081]: I1003 17:36:13.195898 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4q95"] Oct 03 17:36:13 crc kubenswrapper[5081]: W1003 17:36:13.200285 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod458f0d57_1001_444a_ba77_15cb42cc0ceb.slice/crio-d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38 WatchSource:0}: Error finding container d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38: Status 404 returned error can't find the container with id d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38 Oct 03 17:36:13 crc kubenswrapper[5081]: I1003 17:36:13.312187 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4q95" event={"ID":"458f0d57-1001-444a-ba77-15cb42cc0ceb","Type":"ContainerStarted","Data":"d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38"} Oct 03 17:36:14 crc kubenswrapper[5081]: I1003 17:36:14.327272 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4q95" event={"ID":"458f0d57-1001-444a-ba77-15cb42cc0ceb","Type":"ContainerStarted","Data":"d12047585e339559065bc4448793a0c50a63674299b145cd4242fbdadd43bfb2"} Oct 03 17:36:14 crc kubenswrapper[5081]: I1003 17:36:14.346437 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-d4q95" podStartSLOduration=1.8354890529999999 podStartE2EDuration="2.346421389s" podCreationTimestamp="2025-10-03 17:36:12 +0000 UTC" firstStartedPulling="2025-10-03 17:36:13.202577525 +0000 UTC m=+7692.168134138" lastFinishedPulling="2025-10-03 17:36:13.713509821 +0000 UTC m=+7692.679066474" observedRunningTime="2025-10-03 17:36:14.344308069 +0000 UTC m=+7693.309864682" watchObservedRunningTime="2025-10-03 17:36:14.346421389 +0000 UTC m=+7693.311978002" Oct 03 17:36:21 crc kubenswrapper[5081]: I1003 17:36:21.395987 5081 generic.go:334] "Generic (PLEG): container finished" podID="458f0d57-1001-444a-ba77-15cb42cc0ceb" containerID="d12047585e339559065bc4448793a0c50a63674299b145cd4242fbdadd43bfb2" exitCode=0 Oct 03 17:36:21 crc kubenswrapper[5081]: I1003 17:36:21.396020 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4q95" event={"ID":"458f0d57-1001-444a-ba77-15cb42cc0ceb","Type":"ContainerDied","Data":"d12047585e339559065bc4448793a0c50a63674299b145cd4242fbdadd43bfb2"} Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.826462 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.937186 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2md6h\" (UniqueName: \"kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h\") pod \"458f0d57-1001-444a-ba77-15cb42cc0ceb\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.937349 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key\") pod \"458f0d57-1001-444a-ba77-15cb42cc0ceb\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.937407 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph\") pod \"458f0d57-1001-444a-ba77-15cb42cc0ceb\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.937432 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory\") pod \"458f0d57-1001-444a-ba77-15cb42cc0ceb\" (UID: \"458f0d57-1001-444a-ba77-15cb42cc0ceb\") " Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.943364 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph" (OuterVolumeSpecName: "ceph") pod "458f0d57-1001-444a-ba77-15cb42cc0ceb" (UID: "458f0d57-1001-444a-ba77-15cb42cc0ceb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.945220 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h" (OuterVolumeSpecName: "kube-api-access-2md6h") pod "458f0d57-1001-444a-ba77-15cb42cc0ceb" (UID: "458f0d57-1001-444a-ba77-15cb42cc0ceb"). InnerVolumeSpecName "kube-api-access-2md6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.971123 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "458f0d57-1001-444a-ba77-15cb42cc0ceb" (UID: "458f0d57-1001-444a-ba77-15cb42cc0ceb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:22 crc kubenswrapper[5081]: I1003 17:36:22.974688 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory" (OuterVolumeSpecName: "inventory") pod "458f0d57-1001-444a-ba77-15cb42cc0ceb" (UID: "458f0d57-1001-444a-ba77-15cb42cc0ceb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.040146 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2md6h\" (UniqueName: \"kubernetes.io/projected/458f0d57-1001-444a-ba77-15cb42cc0ceb-kube-api-access-2md6h\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.040213 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.040222 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.040230 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/458f0d57-1001-444a-ba77-15cb42cc0ceb-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.419802 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4q95" event={"ID":"458f0d57-1001-444a-ba77-15cb42cc0ceb","Type":"ContainerDied","Data":"d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38"} Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.419847 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2311378c6c74ba97f48cb8cbc7d3efc023b7bfa65f129ab35516e42940a8b38" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.419886 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4q95" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.489377 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-r6dwf"] Oct 03 17:36:23 crc kubenswrapper[5081]: E1003 17:36:23.489839 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458f0d57-1001-444a-ba77-15cb42cc0ceb" containerName="run-os-openstack-openstack-cell1" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.489855 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="458f0d57-1001-444a-ba77-15cb42cc0ceb" containerName="run-os-openstack-openstack-cell1" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.490066 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="458f0d57-1001-444a-ba77-15cb42cc0ceb" containerName="run-os-openstack-openstack-cell1" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.490940 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.493612 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.493780 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.493828 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.499640 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-r6dwf"] Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.499778 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.551244 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs452\" (UniqueName: \"kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.551462 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.551577 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.551751 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.654296 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.654421 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs452\" (UniqueName: \"kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.654487 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.654516 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.658217 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.658806 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.659197 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.668869 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs452\" (UniqueName: \"kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452\") pod \"reboot-os-openstack-openstack-cell1-r6dwf\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:23 crc kubenswrapper[5081]: I1003 17:36:23.815343 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:24 crc kubenswrapper[5081]: I1003 17:36:24.346621 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-r6dwf"] Oct 03 17:36:24 crc kubenswrapper[5081]: I1003 17:36:24.430206 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" event={"ID":"530974f1-240c-4409-895f-c0d97190e235","Type":"ContainerStarted","Data":"8685ca43529bb80c00efa029213967e393a324bf8dacf3d4fece1820f9c5e3ab"} Oct 03 17:36:25 crc kubenswrapper[5081]: I1003 17:36:25.447355 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" event={"ID":"530974f1-240c-4409-895f-c0d97190e235","Type":"ContainerStarted","Data":"34e7075137d3e0e40ea49f9284ca8d5c6c5e4d0353a31d305f7e88d5c2a311fb"} Oct 03 17:36:25 crc kubenswrapper[5081]: I1003 17:36:25.472368 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" podStartSLOduration=1.9954251090000001 podStartE2EDuration="2.472347752s" podCreationTimestamp="2025-10-03 17:36:23 +0000 UTC" firstStartedPulling="2025-10-03 17:36:24.357832577 +0000 UTC m=+7703.323389190" lastFinishedPulling="2025-10-03 17:36:24.83475522 +0000 UTC m=+7703.800311833" observedRunningTime="2025-10-03 17:36:25.467887164 +0000 UTC m=+7704.433443797" watchObservedRunningTime="2025-10-03 17:36:25.472347752 +0000 UTC m=+7704.437904395" Oct 03 17:36:40 crc kubenswrapper[5081]: I1003 17:36:40.588532 5081 generic.go:334] "Generic (PLEG): container finished" podID="530974f1-240c-4409-895f-c0d97190e235" containerID="34e7075137d3e0e40ea49f9284ca8d5c6c5e4d0353a31d305f7e88d5c2a311fb" exitCode=0 Oct 03 17:36:40 crc kubenswrapper[5081]: I1003 17:36:40.588776 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" event={"ID":"530974f1-240c-4409-895f-c0d97190e235","Type":"ContainerDied","Data":"34e7075137d3e0e40ea49f9284ca8d5c6c5e4d0353a31d305f7e88d5c2a311fb"} Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.038099 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.172607 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs452\" (UniqueName: \"kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452\") pod \"530974f1-240c-4409-895f-c0d97190e235\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.172730 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key\") pod \"530974f1-240c-4409-895f-c0d97190e235\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.172892 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory\") pod \"530974f1-240c-4409-895f-c0d97190e235\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.173004 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph\") pod \"530974f1-240c-4409-895f-c0d97190e235\" (UID: \"530974f1-240c-4409-895f-c0d97190e235\") " Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.179186 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452" (OuterVolumeSpecName: "kube-api-access-rs452") pod "530974f1-240c-4409-895f-c0d97190e235" (UID: "530974f1-240c-4409-895f-c0d97190e235"). InnerVolumeSpecName "kube-api-access-rs452". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.180145 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph" (OuterVolumeSpecName: "ceph") pod "530974f1-240c-4409-895f-c0d97190e235" (UID: "530974f1-240c-4409-895f-c0d97190e235"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.202693 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory" (OuterVolumeSpecName: "inventory") pod "530974f1-240c-4409-895f-c0d97190e235" (UID: "530974f1-240c-4409-895f-c0d97190e235"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.208462 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "530974f1-240c-4409-895f-c0d97190e235" (UID: "530974f1-240c-4409-895f-c0d97190e235"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.274871 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.274898 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.274909 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs452\" (UniqueName: \"kubernetes.io/projected/530974f1-240c-4409-895f-c0d97190e235-kube-api-access-rs452\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.274939 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/530974f1-240c-4409-895f-c0d97190e235-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.613911 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" event={"ID":"530974f1-240c-4409-895f-c0d97190e235","Type":"ContainerDied","Data":"8685ca43529bb80c00efa029213967e393a324bf8dacf3d4fece1820f9c5e3ab"} Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.613966 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8685ca43529bb80c00efa029213967e393a324bf8dacf3d4fece1820f9c5e3ab" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.613971 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-r6dwf" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.747721 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-g6jcr"] Oct 03 17:36:42 crc kubenswrapper[5081]: E1003 17:36:42.748148 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="530974f1-240c-4409-895f-c0d97190e235" containerName="reboot-os-openstack-openstack-cell1" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.748164 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="530974f1-240c-4409-895f-c0d97190e235" containerName="reboot-os-openstack-openstack-cell1" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.748363 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="530974f1-240c-4409-895f-c0d97190e235" containerName="reboot-os-openstack-openstack-cell1" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.749390 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.751374 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.751745 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.752052 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.755735 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.761192 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-g6jcr"] Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.892799 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893200 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893276 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893577 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893769 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893839 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893860 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qns6f\" (UniqueName: \"kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893897 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.893976 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.894038 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.894102 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.894181 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.997780 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.997895 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qns6f\" (UniqueName: 
\"kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.997972 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998184 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998284 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998395 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998478 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998632 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998714 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998791 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.998898 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:42 crc kubenswrapper[5081]: I1003 17:36:42.999084 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.004057 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.004514 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.004760 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.005129 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.005696 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.008193 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: 
\"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.017454 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.017578 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.018346 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qns6f\" (UniqueName: \"kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.018362 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.018942 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.022946 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory\") pod \"install-certs-openstack-openstack-cell1-g6jcr\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") " pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.075331 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr"
Oct 03 17:36:43 crc kubenswrapper[5081]: I1003 17:36:43.632919 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-g6jcr"]
Oct 03 17:36:43 crc kubenswrapper[5081]: W1003 17:36:43.643992 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf146ceaa_8661_4678_9ac0_8b6758943586.slice/crio-6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120 WatchSource:0}: Error finding container 6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120: Status 404 returned error can't find the container with id 6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120
Oct 03 17:36:44 crc kubenswrapper[5081]: I1003 17:36:44.638308 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" event={"ID":"f146ceaa-8661-4678-9ac0-8b6758943586","Type":"ContainerStarted","Data":"24e916b62fe76d51d6fcdc1d8c5250b6d22d5f2db173c56e361f5c855ce2d325"}
Oct 03 17:36:44 crc kubenswrapper[5081]: I1003 17:36:44.638834 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" event={"ID":"f146ceaa-8661-4678-9ac0-8b6758943586","Type":"ContainerStarted","Data":"6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120"}
Oct 03 17:36:44 crc kubenswrapper[5081]: I1003 17:36:44.662316 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" podStartSLOduration=2.257956612 podStartE2EDuration="2.662298045s" podCreationTimestamp="2025-10-03 17:36:42 +0000 UTC" firstStartedPulling="2025-10-03 17:36:43.647783964 +0000 UTC m=+7722.613340597" lastFinishedPulling="2025-10-03 17:36:44.052125417 +0000 UTC m=+7723.017682030" observedRunningTime="2025-10-03 17:36:44.653347528 +0000 UTC m=+7723.618904141" watchObservedRunningTime="2025-10-03 17:36:44.662298045 +0000 UTC m=+7723.627854658"
Oct 03 17:37:00 crc kubenswrapper[5081]: I1003 17:37:00.647501 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:37:00 crc kubenswrapper[5081]: I1003 17:37:00.648109 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:37:02 crc kubenswrapper[5081]: I1003 17:37:02.819928 5081 generic.go:334] "Generic (PLEG): container finished" podID="f146ceaa-8661-4678-9ac0-8b6758943586" containerID="24e916b62fe76d51d6fcdc1d8c5250b6d22d5f2db173c56e361f5c855ce2d325" exitCode=0
Oct 03 17:37:02 crc kubenswrapper[5081]: I1003 17:37:02.820260 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" event={"ID":"f146ceaa-8661-4678-9ac0-8b6758943586","Type":"ContainerDied","Data":"24e916b62fe76d51d6fcdc1d8c5250b6d22d5f2db173c56e361f5c855ce2d325"}
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.306853 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385141 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qns6f\" (UniqueName: \"kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385227 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385294 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385392 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385430 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385477 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385507 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385587 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385612 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385695 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385711 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.385728 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle\") pod \"f146ceaa-8661-4678-9ac0-8b6758943586\" (UID: \"f146ceaa-8661-4678-9ac0-8b6758943586\") "
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.392250 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.392576 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.394496 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.394619 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph" (OuterVolumeSpecName: "ceph") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.394814 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.394846 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.395133 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.396478 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f" (OuterVolumeSpecName: "kube-api-access-qns6f") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "kube-api-access-qns6f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.396485 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.397532 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.420200 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.427739 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory" (OuterVolumeSpecName: "inventory") pod "f146ceaa-8661-4678-9ac0-8b6758943586" (UID: "f146ceaa-8661-4678-9ac0-8b6758943586"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.488485 5081 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.488831 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.488926 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489016 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489097 5081 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489180 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489258 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489357 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qns6f\" (UniqueName: \"kubernetes.io/projected/f146ceaa-8661-4678-9ac0-8b6758943586-kube-api-access-qns6f\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489439 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489647 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489728 5081 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.489804 5081 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f146ceaa-8661-4678-9ac0-8b6758943586-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.839661 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr" event={"ID":"f146ceaa-8661-4678-9ac0-8b6758943586","Type":"ContainerDied","Data":"6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120"}
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.839702 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b35bdcb42b224a0bd0aee45d20f8b66db6264aee2d1d89e88aca26d6426b120"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.839753 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-g6jcr"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.932374 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tvwmz"]
Oct 03 17:37:04 crc kubenswrapper[5081]: E1003 17:37:04.933552 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f146ceaa-8661-4678-9ac0-8b6758943586" containerName="install-certs-openstack-openstack-cell1"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.933597 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f146ceaa-8661-4678-9ac0-8b6758943586" containerName="install-certs-openstack-openstack-cell1"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.933910 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f146ceaa-8661-4678-9ac0-8b6758943586" containerName="install-certs-openstack-openstack-cell1"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.935039 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.940022 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.940068 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.940094 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.940097 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.949042 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tvwmz"]
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.998263 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.998739 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqkr2\" (UniqueName: \"kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.998785 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:04 crc kubenswrapper[5081]: I1003 17:37:04.998849 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.100552 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqkr2\" (UniqueName: \"kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.100622 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.100677 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.100744 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.106243 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.117578 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.121032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.121683 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqkr2\" (UniqueName: \"kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2\") pod \"ceph-client-openstack-openstack-cell1-tvwmz\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") " pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.253979 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.809054 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tvwmz"]
Oct 03 17:37:05 crc kubenswrapper[5081]: W1003 17:37:05.813604 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda37cad0b_caa9_4c10_a77a_a805bdf7ea1f.slice/crio-7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c WatchSource:0}: Error finding container 7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c: Status 404 returned error can't find the container with id 7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c
Oct 03 17:37:05 crc kubenswrapper[5081]: I1003 17:37:05.850735 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz" event={"ID":"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f","Type":"ContainerStarted","Data":"7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c"}
Oct 03 17:37:06 crc kubenswrapper[5081]: I1003 17:37:06.860797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz" event={"ID":"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f","Type":"ContainerStarted","Data":"510c646ecf1ed6e288e97f4d2a4cc519febe710e57298b599e435a5178d0cc4e"}
Oct 03 17:37:06 crc kubenswrapper[5081]: I1003 17:37:06.877084 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz" podStartSLOduration=2.405232911 podStartE2EDuration="2.877063588s" podCreationTimestamp="2025-10-03 17:37:04 +0000 UTC" firstStartedPulling="2025-10-03 17:37:05.816095726 +0000 UTC m=+7744.781652339" lastFinishedPulling="2025-10-03 17:37:06.287926403 +0000 UTC m=+7745.253483016" observedRunningTime="2025-10-03 17:37:06.873878137 +0000 UTC m=+7745.839434770" watchObservedRunningTime="2025-10-03 17:37:06.877063588 +0000 UTC m=+7745.842620191"
Oct 03 17:37:11 crc kubenswrapper[5081]: I1003 17:37:11.907774 5081 generic.go:334] "Generic (PLEG): container finished" podID="a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" containerID="510c646ecf1ed6e288e97f4d2a4cc519febe710e57298b599e435a5178d0cc4e" exitCode=0
Oct 03 17:37:11 crc kubenswrapper[5081]: I1003 17:37:11.907880 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz" event={"ID":"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f","Type":"ContainerDied","Data":"510c646ecf1ed6e288e97f4d2a4cc519febe710e57298b599e435a5178d0cc4e"}
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.520922 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.590109 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph\") pod \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") "
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.590343 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory\") pod \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") "
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.590367 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key\") pod \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") "
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.590399 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqkr2\" (UniqueName: \"kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2\") pod \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\" (UID: \"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f\") "
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.601833 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph" (OuterVolumeSpecName: "ceph") pod "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" (UID: "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.604159 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2" (OuterVolumeSpecName: "kube-api-access-dqkr2") pod "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" (UID: "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f"). InnerVolumeSpecName "kube-api-access-dqkr2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.620775 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" (UID: "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.637092 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory" (OuterVolumeSpecName: "inventory") pod "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" (UID: "a37cad0b-caa9-4c10-a77a-a805bdf7ea1f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.692321 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.692349 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.692362 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqkr2\" (UniqueName: \"kubernetes.io/projected/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-kube-api-access-dqkr2\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.692375 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a37cad0b-caa9-4c10-a77a-a805bdf7ea1f-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.930786 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz" event={"ID":"a37cad0b-caa9-4c10-a77a-a805bdf7ea1f","Type":"ContainerDied","Data":"7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c"}
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.931239 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7210b996e7e78614745b209dc5f2476639fd811a35664b69ee9367f3e62f815c"
Oct 03 17:37:13 crc kubenswrapper[5081]: I1003 17:37:13.930807 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tvwmz"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.039243 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-xrjgj"]
Oct 03 17:37:14 crc kubenswrapper[5081]: E1003 17:37:14.039768 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" containerName="ceph-client-openstack-openstack-cell1"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.039790 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" containerName="ceph-client-openstack-openstack-cell1"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.040633 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a37cad0b-caa9-4c10-a77a-a805bdf7ea1f" containerName="ceph-client-openstack-openstack-cell1"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.041485 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.044207 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.044480 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.045167 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.045245 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.051959 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.053346 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-xrjgj"]
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104109 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104220 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104271 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104310 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt2b4\" (UniqueName: \"kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104343 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.104384 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206214 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206281 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206323 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt2b4\" (UniqueName: \"kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206355 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206397 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.206422 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.207328 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.211135 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.211203 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.211812 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.211958 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.223353 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt2b4\" (UniqueName: \"kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4\") pod \"ovn-openstack-openstack-cell1-xrjgj\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") " pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.373276 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:37:14 crc kubenswrapper[5081]: I1003 17:37:14.990008 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-xrjgj"]
Oct 03 17:37:15 crc kubenswrapper[5081]: I1003 17:37:15.961788 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-xrjgj" event={"ID":"cac5b3b6-e745-4550-82d3-49b8366b411d","Type":"ContainerStarted","Data":"ac33bcf69c9c312ad3a7da484c1dd1db092b7c22cba75db89e1cec0a4c2429f6"}
Oct 03 17:37:16 crc kubenswrapper[5081]: I1003 17:37:16.977610 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-xrjgj" event={"ID":"cac5b3b6-e745-4550-82d3-49b8366b411d","Type":"ContainerStarted","Data":"046dcc5eac68c4d41d614d3bc4ca99fc5108579aafc723dc164305e8ff8cfbed"}
Oct 03 17:37:16 crc kubenswrapper[5081]: I1003 17:37:16.996946 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-xrjgj" podStartSLOduration=2.26544648 podStartE2EDuration="2.99692612s" podCreationTimestamp="2025-10-03 17:37:14 +0000 UTC" firstStartedPulling="2025-10-03 17:37:14.997996879 +0000 UTC m=+7753.963553492" lastFinishedPulling="2025-10-03 17:37:15.729476509 +0000 UTC m=+7754.695033132" observedRunningTime="2025-10-03 17:37:16.992878364 +0000 UTC m=+7755.958434997" watchObservedRunningTime="2025-10-03 17:37:16.99692612 +0000 UTC m=+7755.962482743"
Oct 03 17:37:30 crc kubenswrapper[5081]: I1003 17:37:30.647548 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:37:30 crc kubenswrapper[5081]: I1003 17:37:30.648166 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:38:00 crc kubenswrapper[5081]: I1003 17:38:00.647896 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:38:00 crc kubenswrapper[5081]: I1003 17:38:00.648360 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:38:00 crc kubenswrapper[5081]: I1003 17:38:00.648402 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 17:38:00 crc kubenswrapper[5081]: I1003 17:38:00.649221 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 17:38:00 crc kubenswrapper[5081]: I1003 17:38:00.649281 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" gracePeriod=600
Oct 03 17:38:00 crc kubenswrapper[5081]: E1003 17:38:00.775603 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:38:01 crc kubenswrapper[5081]: I1003 17:38:01.457155 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" exitCode=0
Oct 03 17:38:01 crc kubenswrapper[5081]: I1003 17:38:01.457555 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75"}
Oct 03 17:38:01 crc kubenswrapper[5081]: I1003 17:38:01.457649 5081 scope.go:117] "RemoveContainer" containerID="2733c2283282a30c156dd94be5629e180dfd2de2462d99bd2c6a54fef85d7766"
Oct 03 17:38:01 crc kubenswrapper[5081]: I1003 17:38:01.458920 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75"
Oct 03 17:38:01 crc kubenswrapper[5081]: E1003 17:38:01.459496 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:38:14 crc kubenswrapper[5081]: I1003 17:38:14.828789 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75"
Oct 03 17:38:14 crc kubenswrapper[5081]: E1003 17:38:14.829590 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:38:19 crc kubenswrapper[5081]: I1003 17:38:19.647785 5081 generic.go:334] "Generic (PLEG): container finished" podID="cac5b3b6-e745-4550-82d3-49b8366b411d" containerID="046dcc5eac68c4d41d614d3bc4ca99fc5108579aafc723dc164305e8ff8cfbed" exitCode=0
Oct 03 17:38:19 crc kubenswrapper[5081]: I1003 17:38:19.647869 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-xrjgj" event={"ID":"cac5b3b6-e745-4550-82d3-49b8366b411d","Type":"ContainerDied","Data":"046dcc5eac68c4d41d614d3bc4ca99fc5108579aafc723dc164305e8ff8cfbed"}
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.073918 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222080 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222136 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222294 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt2b4\" (UniqueName: \"kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222411 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222453 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.222556 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0\") pod \"cac5b3b6-e745-4550-82d3-49b8366b411d\" (UID: \"cac5b3b6-e745-4550-82d3-49b8366b411d\") "
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.228432 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4" (OuterVolumeSpecName: "kube-api-access-zt2b4") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "kube-api-access-zt2b4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.228520 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph" (OuterVolumeSpecName: "ceph") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.232731 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.257844 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.259514 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory" (OuterVolumeSpecName: "inventory") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.263388 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "cac5b3b6-e745-4550-82d3-49b8366b411d" (UID: "cac5b3b6-e745-4550-82d3-49b8366b411d"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326168 5081 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/cac5b3b6-e745-4550-82d3-49b8366b411d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326198 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326208 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326219 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt2b4\" (UniqueName: \"kubernetes.io/projected/cac5b3b6-e745-4550-82d3-49b8366b411d-kube-api-access-zt2b4\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326229 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.326237 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cac5b3b6-e745-4550-82d3-49b8366b411d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.666772 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-xrjgj" event={"ID":"cac5b3b6-e745-4550-82d3-49b8366b411d","Type":"ContainerDied","Data":"ac33bcf69c9c312ad3a7da484c1dd1db092b7c22cba75db89e1cec0a4c2429f6"}
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.667225 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac33bcf69c9c312ad3a7da484c1dd1db092b7c22cba75db89e1cec0a4c2429f6"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.666852 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-xrjgj"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.754289 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"]
Oct 03 17:38:21 crc kubenswrapper[5081]: E1003 17:38:21.754772 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cac5b3b6-e745-4550-82d3-49b8366b411d" containerName="ovn-openstack-openstack-cell1"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.754788 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cac5b3b6-e745-4550-82d3-49b8366b411d" containerName="ovn-openstack-openstack-cell1"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.755055 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cac5b3b6-e745-4550-82d3-49b8366b411d" containerName="ovn-openstack-openstack-cell1"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.755816 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.757828 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.758533 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.759666 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.759678 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.760324 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.771839 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.775507 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"]
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.949875 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.949988 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8nsz\" (UniqueName: \"kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.950018 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.950047 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.950081 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.950134 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:21 crc kubenswrapper[5081]: I1003 17:38:21.950175 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052110 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052233 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8nsz\" (UniqueName: \"kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052268 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052301 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052348 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052407 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.052451 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.060466 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.060592 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.060665 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.061363 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.061976 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.062168 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"
Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.076156 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8nsz\" (UniqueName: \"kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz\") pod \"neutron-metadata-openstack-openstack-cell1-fz5cs\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") "
pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.076737 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.655218 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-fz5cs"] Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.661987 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:38:22 crc kubenswrapper[5081]: I1003 17:38:22.676156 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" event={"ID":"e1230e1d-eabc-4a43-b030-51403a3c15e8","Type":"ContainerStarted","Data":"9adf9370351984f38e0b5d40022730d0f2665ece7078c36fe09871a16a9e2b1e"} Oct 03 17:38:23 crc kubenswrapper[5081]: I1003 17:38:23.697160 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" event={"ID":"e1230e1d-eabc-4a43-b030-51403a3c15e8","Type":"ContainerStarted","Data":"afa781321de8e03ae7eaba3a1bfff47fbb43131d0c61b2b5df1d043d9b89c6c5"} Oct 03 17:38:23 crc kubenswrapper[5081]: I1003 17:38:23.717814 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" podStartSLOduration=2.166589622 podStartE2EDuration="2.717793181s" podCreationTimestamp="2025-10-03 17:38:21 +0000 UTC" firstStartedPulling="2025-10-03 17:38:22.661720249 +0000 UTC m=+7821.627276862" lastFinishedPulling="2025-10-03 17:38:23.212923818 +0000 UTC m=+7822.178480421" observedRunningTime="2025-10-03 17:38:23.711428359 +0000 UTC m=+7822.676984992" watchObservedRunningTime="2025-10-03 17:38:23.717793181 +0000 UTC m=+7822.683349794" Oct 03 17:38:28 crc kubenswrapper[5081]: I1003 17:38:28.827277 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:38:28 crc kubenswrapper[5081]: E1003 17:38:28.828179 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:38:40 crc kubenswrapper[5081]: I1003 17:38:40.828505 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:38:40 crc kubenswrapper[5081]: E1003 17:38:40.829492 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.403780 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.410605 5081 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.421475 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.570478 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.570858 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.570944 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n66q9\" (UniqueName: \"kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.673937 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.674003 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n66q9\" (UniqueName: \"kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.674150 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.674644 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.674676 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.705129 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n66q9\" (UniqueName: \"kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9\") pod \"community-operators-pl7gh\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:52 crc kubenswrapper[5081]: I1003 17:38:52.743009 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:38:53 crc kubenswrapper[5081]: I1003 17:38:53.254539 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:38:54 crc kubenswrapper[5081]: I1003 17:38:54.028938 5081 generic.go:334] "Generic (PLEG): container finished" podID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerID="914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e" exitCode=0 Oct 03 17:38:54 crc kubenswrapper[5081]: I1003 17:38:54.029423 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerDied","Data":"914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e"} Oct 03 17:38:54 crc kubenswrapper[5081]: I1003 17:38:54.029455 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerStarted","Data":"eafa1d29b5a35e1fb6a6ecd2806eed4d38093046268da979c9925a9df7a2d660"} Oct 03 17:38:54 crc kubenswrapper[5081]: I1003 17:38:54.827768 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:38:54 crc kubenswrapper[5081]: E1003 17:38:54.828106 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:38:56 crc kubenswrapper[5081]: I1003 17:38:56.053958 5081 generic.go:334] "Generic (PLEG): container finished" podID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerID="bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5" exitCode=0 Oct 03 17:38:56 crc kubenswrapper[5081]: I1003 17:38:56.054014 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerDied","Data":"bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5"} Oct 03 17:38:57 crc kubenswrapper[5081]: I1003 17:38:57.075307 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerStarted","Data":"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc"} Oct 03 17:38:57 crc kubenswrapper[5081]: I1003 17:38:57.097884 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pl7gh" podStartSLOduration=2.566995504 podStartE2EDuration="5.097863453s" podCreationTimestamp="2025-10-03 17:38:52 +0000 UTC" firstStartedPulling="2025-10-03 17:38:54.035969923 +0000 UTC 
m=+7853.001526536" lastFinishedPulling="2025-10-03 17:38:56.566837852 +0000 UTC m=+7855.532394485" observedRunningTime="2025-10-03 17:38:57.09248988 +0000 UTC m=+7856.058046493" watchObservedRunningTime="2025-10-03 17:38:57.097863453 +0000 UTC m=+7856.063420066" Oct 03 17:39:02 crc kubenswrapper[5081]: I1003 17:39:02.743918 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:02 crc kubenswrapper[5081]: I1003 17:39:02.744469 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:02 crc kubenswrapper[5081]: I1003 17:39:02.815149 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:03 crc kubenswrapper[5081]: I1003 17:39:03.198684 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:03 crc kubenswrapper[5081]: I1003 17:39:03.252226 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.168957 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pl7gh" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="registry-server" containerID="cri-o://4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc" gracePeriod=2 Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.690415 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.804448 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n66q9\" (UniqueName: \"kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9\") pod \"200d8e0f-716b-4636-a10e-df3a44869cf0\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.804933 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities\") pod \"200d8e0f-716b-4636-a10e-df3a44869cf0\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.804956 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content\") pod \"200d8e0f-716b-4636-a10e-df3a44869cf0\" (UID: \"200d8e0f-716b-4636-a10e-df3a44869cf0\") " Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.806006 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities" (OuterVolumeSpecName: "utilities") pod "200d8e0f-716b-4636-a10e-df3a44869cf0" (UID: "200d8e0f-716b-4636-a10e-df3a44869cf0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.812885 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9" (OuterVolumeSpecName: "kube-api-access-n66q9") pod "200d8e0f-716b-4636-a10e-df3a44869cf0" (UID: "200d8e0f-716b-4636-a10e-df3a44869cf0"). InnerVolumeSpecName "kube-api-access-n66q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.861963 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "200d8e0f-716b-4636-a10e-df3a44869cf0" (UID: "200d8e0f-716b-4636-a10e-df3a44869cf0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.907513 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n66q9\" (UniqueName: \"kubernetes.io/projected/200d8e0f-716b-4636-a10e-df3a44869cf0-kube-api-access-n66q9\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.907552 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:05 crc kubenswrapper[5081]: I1003 17:39:05.907579 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200d8e0f-716b-4636-a10e-df3a44869cf0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.180446 5081 generic.go:334] "Generic (PLEG): container finished" podID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerID="4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc" exitCode=0 Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.180482 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerDied","Data":"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc"} Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.180500 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pl7gh" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.180519 5081 scope.go:117] "RemoveContainer" containerID="4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.180507 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pl7gh" event={"ID":"200d8e0f-716b-4636-a10e-df3a44869cf0","Type":"ContainerDied","Data":"eafa1d29b5a35e1fb6a6ecd2806eed4d38093046268da979c9925a9df7a2d660"} Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.226694 5081 scope.go:117] "RemoveContainer" containerID="bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.237128 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.251039 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pl7gh"] Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.251075 5081 scope.go:117] "RemoveContainer" containerID="914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.296352 5081 scope.go:117] "RemoveContainer" containerID="4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc" Oct 03 17:39:06 crc kubenswrapper[5081]: E1003 17:39:06.296983 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc\": container with ID starting with 4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc not found: ID does not exist" containerID="4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.297022 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc"} err="failed to get container status \"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc\": rpc error: code = NotFound desc = could not find container \"4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc\": container with ID starting with 4026b7858d35e8e94205c8b1203f8d1cd473609348279966684305893d1bfbdc not found: ID does not exist" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.297047 5081 scope.go:117] "RemoveContainer" containerID="bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5" Oct 03 17:39:06 crc kubenswrapper[5081]: E1003 17:39:06.297409 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5\": container with ID starting with bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5 not found: ID does not exist" containerID="bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.297461 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5"} err="failed to get container status \"bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5\": rpc error: code = NotFound desc = could not find 
container \"bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5\": container with ID starting with bf9ae548ef45913338e563946b31971dd7e91c600f9be83d8531fe120c97e3f5 not found: ID does not exist" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.297498 5081 scope.go:117] "RemoveContainer" containerID="914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e" Oct 03 17:39:06 crc kubenswrapper[5081]: E1003 17:39:06.297975 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e\": container with ID starting with 914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e not found: ID does not exist" containerID="914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.298004 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e"} err="failed to get container status \"914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e\": rpc error: code = NotFound desc = could not find container \"914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e\": container with ID starting with 914f4077b0b4b76eb07d8c105919fd1042053e1d2e17afd7db1bff50f6c3d95e not found: ID does not exist" Oct 03 17:39:06 crc kubenswrapper[5081]: I1003 17:39:06.828399 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:39:06 crc kubenswrapper[5081]: E1003 17:39:06.829115 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:39:07 crc kubenswrapper[5081]: I1003 17:39:07.848885 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" path="/var/lib/kubelet/pods/200d8e0f-716b-4636-a10e-df3a44869cf0/volumes" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.791694 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:14 crc kubenswrapper[5081]: E1003 17:39:14.792672 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="extract-content" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.792685 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="extract-content" Oct 03 17:39:14 crc kubenswrapper[5081]: E1003 17:39:14.792721 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="extract-utilities" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.792727 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="extract-utilities" Oct 03 17:39:14 crc kubenswrapper[5081]: E1003 17:39:14.792752 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="registry-server" Oct 03 17:39:14 
crc kubenswrapper[5081]: I1003 17:39:14.792758 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="registry-server" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.792964 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="200d8e0f-716b-4636-a10e-df3a44869cf0" containerName="registry-server" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.794571 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.807816 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.908880 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.909290 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:14 crc kubenswrapper[5081]: I1003 17:39:14.909388 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrbwn\" (UniqueName: \"kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.011102 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.011206 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrbwn\" (UniqueName: \"kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.011668 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.011707 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " 
pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.012004 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.031395 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrbwn\" (UniqueName: \"kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn\") pod \"certified-operators-rrwqc\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.125697 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:15 crc kubenswrapper[5081]: I1003 17:39:15.682331 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:16 crc kubenswrapper[5081]: I1003 17:39:16.305387 5081 generic.go:334] "Generic (PLEG): container finished" podID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerID="e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6" exitCode=0 Oct 03 17:39:16 crc kubenswrapper[5081]: I1003 17:39:16.305469 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerDied","Data":"e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6"} Oct 03 17:39:16 crc kubenswrapper[5081]: I1003 17:39:16.305687 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerStarted","Data":"783292c455aa3dad47cf31807818bd03aca7e49f85d95c2cc907e12b6888f404"} Oct 03 17:39:17 crc kubenswrapper[5081]: I1003 17:39:17.318172 5081 generic.go:334] "Generic (PLEG): container finished" podID="e1230e1d-eabc-4a43-b030-51403a3c15e8" containerID="afa781321de8e03ae7eaba3a1bfff47fbb43131d0c61b2b5df1d043d9b89c6c5" exitCode=0 Oct 03 17:39:17 crc kubenswrapper[5081]: I1003 17:39:17.318271 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" event={"ID":"e1230e1d-eabc-4a43-b030-51403a3c15e8","Type":"ContainerDied","Data":"afa781321de8e03ae7eaba3a1bfff47fbb43131d0c61b2b5df1d043d9b89c6c5"} Oct 03 17:39:17 crc kubenswrapper[5081]: I1003 17:39:17.322169 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerStarted","Data":"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf"} Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.335545 5081 generic.go:334] "Generic (PLEG): container finished" podID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerID="a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf" exitCode=0 Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.335655 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" 
event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerDied","Data":"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf"} Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.818659 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.828841 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:39:18 crc kubenswrapper[5081]: E1003 17:39:18.829200 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.899921 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8nsz\" (UniqueName: \"kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900205 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900307 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900356 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900432 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: \"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.900484 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0\") pod \"e1230e1d-eabc-4a43-b030-51403a3c15e8\" (UID: 
\"e1230e1d-eabc-4a43-b030-51403a3c15e8\") " Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.909710 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph" (OuterVolumeSpecName: "ceph") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.909879 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz" (OuterVolumeSpecName: "kube-api-access-m8nsz") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "kube-api-access-m8nsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.913780 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.931401 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.936304 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.940139 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory" (OuterVolumeSpecName: "inventory") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:18 crc kubenswrapper[5081]: I1003 17:39:18.944229 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e1230e1d-eabc-4a43-b030-51403a3c15e8" (UID: "e1230e1d-eabc-4a43-b030-51403a3c15e8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004797 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004832 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004849 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004862 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004875 5081 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004892 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e1230e1d-eabc-4a43-b030-51403a3c15e8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.004906 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8nsz\" (UniqueName: \"kubernetes.io/projected/e1230e1d-eabc-4a43-b030-51403a3c15e8-kube-api-access-m8nsz\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.346994 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.347001 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-fz5cs" event={"ID":"e1230e1d-eabc-4a43-b030-51403a3c15e8","Type":"ContainerDied","Data":"9adf9370351984f38e0b5d40022730d0f2665ece7078c36fe09871a16a9e2b1e"} Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.347456 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9adf9370351984f38e0b5d40022730d0f2665ece7078c36fe09871a16a9e2b1e" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.352701 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerStarted","Data":"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f"} Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.384114 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rrwqc" podStartSLOduration=2.814314149 podStartE2EDuration="5.384092901s" podCreationTimestamp="2025-10-03 17:39:14 +0000 UTC" firstStartedPulling="2025-10-03 17:39:16.308763617 +0000 UTC m=+7875.274320230" lastFinishedPulling="2025-10-03 17:39:18.878542349 +0000 UTC m=+7877.844098982" observedRunningTime="2025-10-03 17:39:19.371396758 +0000 UTC m=+7878.336953361" watchObservedRunningTime="2025-10-03 17:39:19.384092901 +0000 UTC m=+7878.349649514" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.432898 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-5d9kd"] Oct 03 17:39:19 crc kubenswrapper[5081]: E1003 17:39:19.433463 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1230e1d-eabc-4a43-b030-51403a3c15e8" containerName="neutron-metadata-openstack-openstack-cell1" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.433488 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1230e1d-eabc-4a43-b030-51403a3c15e8" containerName="neutron-metadata-openstack-openstack-cell1" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.433788 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1230e1d-eabc-4a43-b030-51403a3c15e8" containerName="neutron-metadata-openstack-openstack-cell1" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.434985 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.437637 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.437821 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.438010 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.438019 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.438126 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.446796 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-5d9kd"] Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515027 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515064 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515097 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515202 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515257 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drsjl\" (UniqueName: \"kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.515280 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph\") pod 
\"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617501 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617702 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drsjl\" (UniqueName: \"kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617743 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617824 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617853 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.617916 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.621578 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.622736 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.623307 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.624929 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.626312 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.645365 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drsjl\" (UniqueName: \"kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl\") pod \"libvirt-openstack-openstack-cell1-5d9kd\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:19 crc kubenswrapper[5081]: I1003 17:39:19.760913 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:39:20 crc kubenswrapper[5081]: I1003 17:39:20.307481 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-5d9kd"] Oct 03 17:39:20 crc kubenswrapper[5081]: W1003 17:39:20.314819 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7014f4b4_d256_4a19_9e35_1e3afc33fd76.slice/crio-77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a WatchSource:0}: Error finding container 77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a: Status 404 returned error can't find the container with id 77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a Oct 03 17:39:20 crc kubenswrapper[5081]: I1003 17:39:20.363267 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" event={"ID":"7014f4b4-d256-4a19-9e35-1e3afc33fd76","Type":"ContainerStarted","Data":"77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a"} Oct 03 17:39:21 crc kubenswrapper[5081]: I1003 17:39:21.375883 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" event={"ID":"7014f4b4-d256-4a19-9e35-1e3afc33fd76","Type":"ContainerStarted","Data":"4116d7eb8458f4bfa76b1ffc8d17679f931bcf785feb6449a13367496b6b90ea"} Oct 03 17:39:21 crc kubenswrapper[5081]: I1003 17:39:21.407768 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" podStartSLOduration=1.993838692 podStartE2EDuration="2.407752581s" podCreationTimestamp="2025-10-03 17:39:19 +0000 UTC" firstStartedPulling="2025-10-03 17:39:20.317184602 +0000 UTC m=+7879.282741215" lastFinishedPulling="2025-10-03 17:39:20.731098491 +0000 UTC m=+7879.696655104" observedRunningTime="2025-10-03 17:39:21.400216226 +0000 UTC 
m=+7880.365772839" watchObservedRunningTime="2025-10-03 17:39:21.407752581 +0000 UTC m=+7880.373309194" Oct 03 17:39:25 crc kubenswrapper[5081]: I1003 17:39:25.126794 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:25 crc kubenswrapper[5081]: I1003 17:39:25.129037 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:25 crc kubenswrapper[5081]: I1003 17:39:25.212093 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:25 crc kubenswrapper[5081]: I1003 17:39:25.489105 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:25 crc kubenswrapper[5081]: I1003 17:39:25.561695 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:27 crc kubenswrapper[5081]: I1003 17:39:27.448431 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rrwqc" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="registry-server" containerID="cri-o://c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f" gracePeriod=2 Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.005536 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.132734 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities\") pod \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.132874 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content\") pod \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.132965 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrbwn\" (UniqueName: \"kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn\") pod \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\" (UID: \"585afb5f-69a0-46d4-8e38-b1e0cede8bcf\") " Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.133951 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities" (OuterVolumeSpecName: "utilities") pod "585afb5f-69a0-46d4-8e38-b1e0cede8bcf" (UID: "585afb5f-69a0-46d4-8e38-b1e0cede8bcf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.139308 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn" (OuterVolumeSpecName: "kube-api-access-vrbwn") pod "585afb5f-69a0-46d4-8e38-b1e0cede8bcf" (UID: "585afb5f-69a0-46d4-8e38-b1e0cede8bcf"). InnerVolumeSpecName "kube-api-access-vrbwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.235802 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.236194 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrbwn\" (UniqueName: \"kubernetes.io/projected/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-kube-api-access-vrbwn\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.269448 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "585afb5f-69a0-46d4-8e38-b1e0cede8bcf" (UID: "585afb5f-69a0-46d4-8e38-b1e0cede8bcf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.339455 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585afb5f-69a0-46d4-8e38-b1e0cede8bcf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.462405 5081 generic.go:334] "Generic (PLEG): container finished" podID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerID="c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f" exitCode=0 Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.462447 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerDied","Data":"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f"} Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.462472 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rrwqc" event={"ID":"585afb5f-69a0-46d4-8e38-b1e0cede8bcf","Type":"ContainerDied","Data":"783292c455aa3dad47cf31807818bd03aca7e49f85d95c2cc907e12b6888f404"} Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.462491 5081 scope.go:117] "RemoveContainer" containerID="c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.462515 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rrwqc" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.492541 5081 scope.go:117] "RemoveContainer" containerID="a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.522505 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.535155 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rrwqc"] Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.541406 5081 scope.go:117] "RemoveContainer" containerID="e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.578525 5081 scope.go:117] "RemoveContainer" containerID="c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f" Oct 03 17:39:28 crc kubenswrapper[5081]: E1003 17:39:28.584069 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f\": container with ID starting with c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f not found: ID does not exist" containerID="c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.584114 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f"} err="failed to get container status \"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f\": rpc error: code = NotFound desc = could not find container \"c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f\": container with ID starting with c6cc4e35ee04eb1733d0df525a91eadbd92e25e4b75238b4ca2e9974b019ec1f not found: ID does not exist" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.584144 5081 scope.go:117] "RemoveContainer" containerID="a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf" Oct 03 17:39:28 crc kubenswrapper[5081]: E1003 17:39:28.584932 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf\": container with ID starting with a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf not found: ID does not exist" containerID="a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.585025 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf"} err="failed to get container status \"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf\": rpc error: code = NotFound desc = could not find container \"a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf\": container with ID starting with a8c5b76c5a11b55c929ed77afd0d168c3c9e1c073cb4b514c687b6f6123144bf not found: ID does not exist" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.585057 5081 scope.go:117] "RemoveContainer" containerID="e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6" Oct 03 17:39:28 crc kubenswrapper[5081]: E1003 17:39:28.585374 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6\": container with ID starting with e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6 not found: ID does not exist" containerID="e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6" Oct 03 17:39:28 crc kubenswrapper[5081]: I1003 17:39:28.585409 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6"} err="failed to get container status \"e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6\": rpc error: code = NotFound desc = could not find container \"e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6\": container with ID starting with e77f60e81ab1cb7d4e3811c7137648e03a1d4644127b825d50af1e923fbb61c6 not found: ID does not exist" Oct 03 17:39:29 crc kubenswrapper[5081]: I1003 17:39:29.845192 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" path="/var/lib/kubelet/pods/585afb5f-69a0-46d4-8e38-b1e0cede8bcf/volumes" Oct 03 17:39:32 crc kubenswrapper[5081]: I1003 17:39:32.827661 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:39:32 crc kubenswrapper[5081]: E1003 17:39:32.828410 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:39:45 crc kubenswrapper[5081]: I1003 17:39:45.832090 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:39:45 crc kubenswrapper[5081]: E1003 17:39:45.832949 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:40:00 crc kubenswrapper[5081]: I1003 17:40:00.828354 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:40:00 crc kubenswrapper[5081]: E1003 17:40:00.829590 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:40:11 crc kubenswrapper[5081]: I1003 17:40:11.834341 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:40:11 crc kubenswrapper[5081]: E1003 17:40:11.835169 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:40:25 crc kubenswrapper[5081]: I1003 17:40:25.827754 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:40:25 crc kubenswrapper[5081]: E1003 17:40:25.828872 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:40:38 crc kubenswrapper[5081]: I1003 17:40:38.828549 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:40:38 crc kubenswrapper[5081]: E1003 17:40:38.829523 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:40:49 crc kubenswrapper[5081]: I1003 17:40:49.828026 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:40:49 crc kubenswrapper[5081]: E1003 17:40:49.828820 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:41:01 crc kubenswrapper[5081]: I1003 17:41:01.838185 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:41:01 crc kubenswrapper[5081]: E1003 17:41:01.839136 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:41:16 crc kubenswrapper[5081]: I1003 17:41:16.828736 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:41:16 crc kubenswrapper[5081]: E1003 17:41:16.829898 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:41:27 crc kubenswrapper[5081]: I1003 17:41:27.827693 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:41:27 crc kubenswrapper[5081]: E1003 17:41:27.828427 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:41:39 crc kubenswrapper[5081]: I1003 17:41:39.828769 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:41:39 crc kubenswrapper[5081]: E1003 17:41:39.830465 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:41:52 crc kubenswrapper[5081]: I1003 17:41:52.828549 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:41:52 crc kubenswrapper[5081]: E1003 17:41:52.829402 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.498105 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:02 crc kubenswrapper[5081]: E1003 17:42:02.499353 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="registry-server" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.499375 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="registry-server" Oct 03 17:42:02 crc kubenswrapper[5081]: E1003 17:42:02.499391 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="extract-content" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.499399 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="extract-content" Oct 03 17:42:02 crc kubenswrapper[5081]: E1003 17:42:02.499418 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="extract-utilities" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.499427 5081 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="extract-utilities" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.499751 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="585afb5f-69a0-46d4-8e38-b1e0cede8bcf" containerName="registry-server" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.502066 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.534098 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.573372 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.573603 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7j7t\" (UniqueName: \"kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.573857 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.676424 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.676995 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.677129 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7j7t\" (UniqueName: \"kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.677220 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.677539 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.701523 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7j7t\" (UniqueName: \"kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t\") pod \"redhat-marketplace-2b4cj\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:02 crc kubenswrapper[5081]: I1003 17:42:02.843394 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:03 crc kubenswrapper[5081]: I1003 17:42:03.316786 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:04 crc kubenswrapper[5081]: I1003 17:42:04.286314 5081 generic.go:334] "Generic (PLEG): container finished" podID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerID="d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed" exitCode=0 Oct 03 17:42:04 crc kubenswrapper[5081]: I1003 17:42:04.286374 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerDied","Data":"d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed"} Oct 03 17:42:04 crc kubenswrapper[5081]: I1003 17:42:04.286764 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerStarted","Data":"5b9bd916512fe4135001e8598b3e5ebd1285cdbc4e13a47c121fae8b94919a67"} Oct 03 17:42:04 crc kubenswrapper[5081]: I1003 17:42:04.827487 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:42:04 crc kubenswrapper[5081]: E1003 17:42:04.828174 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:42:06 crc kubenswrapper[5081]: I1003 17:42:06.309182 5081 generic.go:334] "Generic (PLEG): container finished" podID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerID="28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6" exitCode=0 Oct 03 17:42:06 crc kubenswrapper[5081]: I1003 17:42:06.309302 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerDied","Data":"28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6"} Oct 03 17:42:08 crc kubenswrapper[5081]: I1003 17:42:08.348271 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerStarted","Data":"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6"} Oct 03 17:42:08 crc 
kubenswrapper[5081]: I1003 17:42:08.372930 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2b4cj" podStartSLOduration=3.520852355 podStartE2EDuration="6.372912531s" podCreationTimestamp="2025-10-03 17:42:02 +0000 UTC" firstStartedPulling="2025-10-03 17:42:04.288329439 +0000 UTC m=+8043.253886052" lastFinishedPulling="2025-10-03 17:42:07.140389615 +0000 UTC m=+8046.105946228" observedRunningTime="2025-10-03 17:42:08.368776063 +0000 UTC m=+8047.334332726" watchObservedRunningTime="2025-10-03 17:42:08.372912531 +0000 UTC m=+8047.338469144" Oct 03 17:42:12 crc kubenswrapper[5081]: I1003 17:42:12.844112 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:12 crc kubenswrapper[5081]: I1003 17:42:12.844612 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:12 crc kubenswrapper[5081]: I1003 17:42:12.896175 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:13 crc kubenswrapper[5081]: I1003 17:42:13.457854 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:13 crc kubenswrapper[5081]: I1003 17:42:13.512640 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:15 crc kubenswrapper[5081]: I1003 17:42:15.416418 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2b4cj" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="registry-server" containerID="cri-o://86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6" gracePeriod=2 Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.009180 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.153910 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7j7t\" (UniqueName: \"kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t\") pod \"1fe4088d-2645-4ae6-862c-4a1848bf338a\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.154051 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities\") pod \"1fe4088d-2645-4ae6-862c-4a1848bf338a\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.154282 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content\") pod \"1fe4088d-2645-4ae6-862c-4a1848bf338a\" (UID: \"1fe4088d-2645-4ae6-862c-4a1848bf338a\") " Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.154804 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities" (OuterVolumeSpecName: "utilities") pod "1fe4088d-2645-4ae6-862c-4a1848bf338a" (UID: "1fe4088d-2645-4ae6-862c-4a1848bf338a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.159070 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t" (OuterVolumeSpecName: "kube-api-access-h7j7t") pod "1fe4088d-2645-4ae6-862c-4a1848bf338a" (UID: "1fe4088d-2645-4ae6-862c-4a1848bf338a"). InnerVolumeSpecName "kube-api-access-h7j7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.168416 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1fe4088d-2645-4ae6-862c-4a1848bf338a" (UID: "1fe4088d-2645-4ae6-862c-4a1848bf338a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.257326 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.257378 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7j7t\" (UniqueName: \"kubernetes.io/projected/1fe4088d-2645-4ae6-862c-4a1848bf338a-kube-api-access-h7j7t\") on node \"crc\" DevicePath \"\"" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.257399 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fe4088d-2645-4ae6-862c-4a1848bf338a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.427537 5081 generic.go:334] "Generic (PLEG): container finished" podID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerID="86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6" exitCode=0 Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.427598 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerDied","Data":"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6"} Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.427614 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2b4cj" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.427630 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2b4cj" event={"ID":"1fe4088d-2645-4ae6-862c-4a1848bf338a","Type":"ContainerDied","Data":"5b9bd916512fe4135001e8598b3e5ebd1285cdbc4e13a47c121fae8b94919a67"} Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.427649 5081 scope.go:117] "RemoveContainer" containerID="86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.460004 5081 scope.go:117] "RemoveContainer" containerID="28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.468010 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.478401 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2b4cj"] Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.490531 5081 scope.go:117] "RemoveContainer" containerID="d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.525149 5081 scope.go:117] "RemoveContainer" containerID="86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6" Oct 03 17:42:16 crc kubenswrapper[5081]: E1003 17:42:16.525670 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6\": container with ID starting with 86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6 not found: ID does not exist" containerID="86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.525773 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6"} err="failed to get container status \"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6\": rpc error: code = NotFound desc = could not find container \"86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6\": container with ID starting with 86fede1042f95e62b9b6eae12e2d87df415eeaceeb4faf48f035f6475ee934d6 not found: ID does not exist" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.525853 5081 scope.go:117] "RemoveContainer" containerID="28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6" Oct 03 17:42:16 crc kubenswrapper[5081]: E1003 17:42:16.526148 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6\": container with ID starting with 28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6 not found: ID does not exist" containerID="28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.526197 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6"} err="failed to get container status \"28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6\": rpc error: code = NotFound desc = could not find 
container \"28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6\": container with ID starting with 28858a0a82d28e0c673daa79b987075ca94bc8646f72eb6d7d9192aa5520d5d6 not found: ID does not exist" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.526220 5081 scope.go:117] "RemoveContainer" containerID="d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed" Oct 03 17:42:16 crc kubenswrapper[5081]: E1003 17:42:16.526457 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed\": container with ID starting with d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed not found: ID does not exist" containerID="d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.526502 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed"} err="failed to get container status \"d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed\": rpc error: code = NotFound desc = could not find container \"d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed\": container with ID starting with d235b08ed44a79978bcbbc2faaeb204ed4e4f3ad943d770449b3d34dbe4481ed not found: ID does not exist" Oct 03 17:42:16 crc kubenswrapper[5081]: I1003 17:42:16.827636 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:42:16 crc kubenswrapper[5081]: E1003 17:42:16.829638 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:42:17 crc kubenswrapper[5081]: I1003 17:42:17.844283 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" path="/var/lib/kubelet/pods/1fe4088d-2645-4ae6-862c-4a1848bf338a/volumes" Oct 03 17:42:29 crc kubenswrapper[5081]: I1003 17:42:29.828296 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:42:29 crc kubenswrapper[5081]: E1003 17:42:29.830653 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:42:42 crc kubenswrapper[5081]: I1003 17:42:42.828611 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:42:42 crc kubenswrapper[5081]: E1003 17:42:42.829355 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:42:55 crc kubenswrapper[5081]: I1003 17:42:55.828142 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:42:55 crc kubenswrapper[5081]: E1003 17:42:55.828851 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:43:08 crc kubenswrapper[5081]: I1003 17:43:08.829108 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:43:10 crc kubenswrapper[5081]: I1003 17:43:10.005939 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9"} Oct 03 17:44:01 crc kubenswrapper[5081]: I1003 17:44:01.629451 5081 generic.go:334] "Generic (PLEG): container finished" podID="7014f4b4-d256-4a19-9e35-1e3afc33fd76" containerID="4116d7eb8458f4bfa76b1ffc8d17679f931bcf785feb6449a13367496b6b90ea" exitCode=0 Oct 03 17:44:01 crc kubenswrapper[5081]: I1003 17:44:01.629629 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" event={"ID":"7014f4b4-d256-4a19-9e35-1e3afc33fd76","Type":"ContainerDied","Data":"4116d7eb8458f4bfa76b1ffc8d17679f931bcf785feb6449a13367496b6b90ea"} Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.068670 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.209443 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.209849 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.209925 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.209971 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drsjl\" (UniqueName: \"kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.210044 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.210084 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0\") pod \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\" (UID: \"7014f4b4-d256-4a19-9e35-1e3afc33fd76\") " Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.215812 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph" (OuterVolumeSpecName: "ceph") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.216309 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.216776 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl" (OuterVolumeSpecName: "kube-api-access-drsjl") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "kube-api-access-drsjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.241156 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.242770 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory" (OuterVolumeSpecName: "inventory") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.247433 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "7014f4b4-d256-4a19-9e35-1e3afc33fd76" (UID: "7014f4b4-d256-4a19-9e35-1e3afc33fd76"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.313532 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.313744 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.313840 5081 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.313914 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drsjl\" (UniqueName: \"kubernetes.io/projected/7014f4b4-d256-4a19-9e35-1e3afc33fd76-kube-api-access-drsjl\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.313982 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.314062 5081 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7014f4b4-d256-4a19-9e35-1e3afc33fd76-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.698341 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" event={"ID":"7014f4b4-d256-4a19-9e35-1e3afc33fd76","Type":"ContainerDied","Data":"77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a"} Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.698401 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77b4b8882e38ed2b6d7e124f14c41634efbcae3e244245d8d1a5743871ce7c6a" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.698513 5081 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-5d9kd" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.769351 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-rwk8c"] Oct 03 17:44:03 crc kubenswrapper[5081]: E1003 17:44:03.769935 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="registry-server" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.769957 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="registry-server" Oct 03 17:44:03 crc kubenswrapper[5081]: E1003 17:44:03.769972 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="extract-utilities" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.769980 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="extract-utilities" Oct 03 17:44:03 crc kubenswrapper[5081]: E1003 17:44:03.770000 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7014f4b4-d256-4a19-9e35-1e3afc33fd76" containerName="libvirt-openstack-openstack-cell1" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.770008 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="7014f4b4-d256-4a19-9e35-1e3afc33fd76" containerName="libvirt-openstack-openstack-cell1" Oct 03 17:44:03 crc kubenswrapper[5081]: E1003 17:44:03.770039 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="extract-content" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.770047 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="extract-content" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.770323 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="7014f4b4-d256-4a19-9e35-1e3afc33fd76" containerName="libvirt-openstack-openstack-cell1" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.770355 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fe4088d-2645-4ae6-862c-4a1848bf338a" containerName="registry-server" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.771323 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.773730 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.773932 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.780595 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-rwk8c"] Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.783741 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.783866 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.783875 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.784005 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.784052 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.875853 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876051 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876094 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q94z4\" (UniqueName: \"kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876149 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876207 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876252 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876304 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876405 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.876963 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.877046 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.877289 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979230 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979671 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979698 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q94z4\" (UniqueName: \"kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979748 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979791 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979836 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979859 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979923 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.979976 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.980030 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory\") pod 
\"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.980124 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.983793 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.984592 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.985389 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.985523 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.987343 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.987793 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.988884 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.989018 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.989816 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:03 crc kubenswrapper[5081]: I1003 17:44:03.995038 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:04 crc kubenswrapper[5081]: I1003 17:44:04.003884 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q94z4\" (UniqueName: \"kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4\") pod \"nova-cell1-openstack-openstack-cell1-rwk8c\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:04 crc kubenswrapper[5081]: I1003 17:44:04.088365 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:44:04 crc kubenswrapper[5081]: I1003 17:44:04.612083 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-rwk8c"] Oct 03 17:44:04 crc kubenswrapper[5081]: I1003 17:44:04.623127 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:44:04 crc kubenswrapper[5081]: I1003 17:44:04.708406 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" event={"ID":"f85372bd-deb7-4f5d-b631-1b26524ca9ff","Type":"ContainerStarted","Data":"671fab856405f35811527448145df1cb5b3542e33b483366c13a7be38ddaf6f3"} Oct 03 17:44:06 crc kubenswrapper[5081]: I1003 17:44:06.728092 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" event={"ID":"f85372bd-deb7-4f5d-b631-1b26524ca9ff","Type":"ContainerStarted","Data":"927104332d39adbe4f30c11d4ffc5aaa91a1da4a749db9e3a8265014f37a0f4d"} Oct 03 17:44:06 crc kubenswrapper[5081]: I1003 17:44:06.756901 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" podStartSLOduration=2.524055411 podStartE2EDuration="3.756876855s" podCreationTimestamp="2025-10-03 17:44:03 +0000 UTC" firstStartedPulling="2025-10-03 17:44:04.622774644 +0000 UTC m=+8163.588331347" lastFinishedPulling="2025-10-03 17:44:05.855596178 +0000 UTC m=+8164.821152791" observedRunningTime="2025-10-03 17:44:06.749056221 +0000 UTC m=+8165.714612864" watchObservedRunningTime="2025-10-03 17:44:06.756876855 +0000 UTC m=+8165.722433478" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.697244 5081 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.699965 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.725506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.820811 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.821023 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58c2x\" (UniqueName: \"kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.821077 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.922511 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58c2x\" (UniqueName: \"kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.922823 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.923062 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.923969 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.924353 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " 
pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:43 crc kubenswrapper[5081]: I1003 17:44:43.946264 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58c2x\" (UniqueName: \"kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x\") pod \"redhat-operators-6dhjn\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:44 crc kubenswrapper[5081]: I1003 17:44:44.036010 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:44 crc kubenswrapper[5081]: I1003 17:44:44.551742 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:45 crc kubenswrapper[5081]: I1003 17:44:45.162128 5081 generic.go:334] "Generic (PLEG): container finished" podID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerID="cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297" exitCode=0 Oct 03 17:44:45 crc kubenswrapper[5081]: I1003 17:44:45.162193 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerDied","Data":"cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297"} Oct 03 17:44:45 crc kubenswrapper[5081]: I1003 17:44:45.162551 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerStarted","Data":"dbf6dca0c0e16fd71e3e0d0a00ccb4f9fafab9204860260618c5669b4be54624"} Oct 03 17:44:47 crc kubenswrapper[5081]: I1003 17:44:47.185087 5081 generic.go:334] "Generic (PLEG): container finished" podID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerID="300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011" exitCode=0 Oct 03 17:44:47 crc kubenswrapper[5081]: I1003 17:44:47.185268 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerDied","Data":"300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011"} Oct 03 17:44:48 crc kubenswrapper[5081]: I1003 17:44:48.202581 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerStarted","Data":"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73"} Oct 03 17:44:48 crc kubenswrapper[5081]: I1003 17:44:48.226040 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6dhjn" podStartSLOduration=2.658344173 podStartE2EDuration="5.226023403s" podCreationTimestamp="2025-10-03 17:44:43 +0000 UTC" firstStartedPulling="2025-10-03 17:44:45.164118576 +0000 UTC m=+8204.129675189" lastFinishedPulling="2025-10-03 17:44:47.731797806 +0000 UTC m=+8206.697354419" observedRunningTime="2025-10-03 17:44:48.223094249 +0000 UTC m=+8207.188650882" watchObservedRunningTime="2025-10-03 17:44:48.226023403 +0000 UTC m=+8207.191580016" Oct 03 17:44:54 crc kubenswrapper[5081]: I1003 17:44:54.036603 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:54 crc kubenswrapper[5081]: I1003 17:44:54.037198 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:54 crc kubenswrapper[5081]: I1003 17:44:54.116137 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:54 crc kubenswrapper[5081]: I1003 17:44:54.359649 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:54 crc kubenswrapper[5081]: I1003 17:44:54.471399 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.304189 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6dhjn" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="registry-server" containerID="cri-o://e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73" gracePeriod=2 Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.820861 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.981833 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities\") pod \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.981985 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content\") pod \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.982359 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58c2x\" (UniqueName: \"kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x\") pod \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\" (UID: \"fcb688f8-7842-413a-b8ef-ff9707eb7cf9\") " Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.982921 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities" (OuterVolumeSpecName: "utilities") pod "fcb688f8-7842-413a-b8ef-ff9707eb7cf9" (UID: "fcb688f8-7842-413a-b8ef-ff9707eb7cf9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.983162 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:56 crc kubenswrapper[5081]: I1003 17:44:56.987396 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x" (OuterVolumeSpecName: "kube-api-access-58c2x") pod "fcb688f8-7842-413a-b8ef-ff9707eb7cf9" (UID: "fcb688f8-7842-413a-b8ef-ff9707eb7cf9"). InnerVolumeSpecName "kube-api-access-58c2x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.062936 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcb688f8-7842-413a-b8ef-ff9707eb7cf9" (UID: "fcb688f8-7842-413a-b8ef-ff9707eb7cf9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.084904 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58c2x\" (UniqueName: \"kubernetes.io/projected/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-kube-api-access-58c2x\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.084936 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb688f8-7842-413a-b8ef-ff9707eb7cf9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.318855 5081 generic.go:334] "Generic (PLEG): container finished" podID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerID="e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73" exitCode=0 Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.318912 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6dhjn" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.318937 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerDied","Data":"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73"} Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.319351 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6dhjn" event={"ID":"fcb688f8-7842-413a-b8ef-ff9707eb7cf9","Type":"ContainerDied","Data":"dbf6dca0c0e16fd71e3e0d0a00ccb4f9fafab9204860260618c5669b4be54624"} Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.319397 5081 scope.go:117] "RemoveContainer" containerID="e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.340175 5081 scope.go:117] "RemoveContainer" containerID="300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.362002 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.373140 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6dhjn"] Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.376161 5081 scope.go:117] "RemoveContainer" containerID="cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.429890 5081 scope.go:117] "RemoveContainer" containerID="e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73" Oct 03 17:44:57 crc kubenswrapper[5081]: E1003 17:44:57.430281 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73\": container with ID starting with e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73 
not found: ID does not exist" containerID="e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.430320 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73"} err="failed to get container status \"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73\": rpc error: code = NotFound desc = could not find container \"e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73\": container with ID starting with e856b33ce78c2119575a6d4d6144eac4a003df9610b98c4af0dd0d6577a1bd73 not found: ID does not exist" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.430346 5081 scope.go:117] "RemoveContainer" containerID="300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011" Oct 03 17:44:57 crc kubenswrapper[5081]: E1003 17:44:57.430624 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011\": container with ID starting with 300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011 not found: ID does not exist" containerID="300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.430659 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011"} err="failed to get container status \"300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011\": rpc error: code = NotFound desc = could not find container \"300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011\": container with ID starting with 300e464d470e353dddad31236908402ea931841b1787863b80e1302232ecb011 not found: ID does not exist" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.430688 5081 scope.go:117] "RemoveContainer" containerID="cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297" Oct 03 17:44:57 crc kubenswrapper[5081]: E1003 17:44:57.430952 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297\": container with ID starting with cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297 not found: ID does not exist" containerID="cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.430976 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297"} err="failed to get container status \"cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297\": rpc error: code = NotFound desc = could not find container \"cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297\": container with ID starting with cca4e590f5b06214548efcc91fb54ce7e6cbf7d77a05cef333c88b4e2102b297 not found: ID does not exist" Oct 03 17:44:57 crc kubenswrapper[5081]: I1003 17:44:57.842173 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" path="/var/lib/kubelet/pods/fcb688f8-7842-413a-b8ef-ff9707eb7cf9/volumes" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.149968 5081 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld"] Oct 03 17:45:00 crc kubenswrapper[5081]: E1003 17:45:00.150761 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="registry-server" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.150776 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="registry-server" Oct 03 17:45:00 crc kubenswrapper[5081]: E1003 17:45:00.150795 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="extract-content" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.150801 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="extract-content" Oct 03 17:45:00 crc kubenswrapper[5081]: E1003 17:45:00.150813 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="extract-utilities" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.150820 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="extract-utilities" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.151085 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb688f8-7842-413a-b8ef-ff9707eb7cf9" containerName="registry-server" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.151989 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.159755 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld"] Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.203390 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.203551 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.263445 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.263799 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phfzs\" (UniqueName: \"kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.263860 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.366514 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.366779 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.366814 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phfzs\" (UniqueName: \"kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.370347 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.384878 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.397239 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phfzs\" (UniqueName: \"kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs\") pod \"collect-profiles-29325225-wkhld\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.532437 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:00 crc kubenswrapper[5081]: I1003 17:45:00.972137 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld"] Oct 03 17:45:01 crc kubenswrapper[5081]: I1003 17:45:01.365702 5081 generic.go:334] "Generic (PLEG): container finished" podID="0b8e659d-1cb5-48fd-8d46-ab52af6ab236" containerID="ce61065a17400370f0078663a19c1c41cad450f0f60d06bcf04406eed4818d0c" exitCode=0 Oct 03 17:45:01 crc kubenswrapper[5081]: I1003 17:45:01.365893 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" event={"ID":"0b8e659d-1cb5-48fd-8d46-ab52af6ab236","Type":"ContainerDied","Data":"ce61065a17400370f0078663a19c1c41cad450f0f60d06bcf04406eed4818d0c"} Oct 03 17:45:01 crc kubenswrapper[5081]: I1003 17:45:01.366029 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" event={"ID":"0b8e659d-1cb5-48fd-8d46-ab52af6ab236","Type":"ContainerStarted","Data":"7371efeeaefb176ac43d82dc14c526a582924d20693d9ee95c066a44ec0f2462"} Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.824406 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.928118 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume\") pod \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.928224 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phfzs\" (UniqueName: \"kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs\") pod \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.928350 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume\") pod \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\" (UID: \"0b8e659d-1cb5-48fd-8d46-ab52af6ab236\") " Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.928856 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume" (OuterVolumeSpecName: "config-volume") pod "0b8e659d-1cb5-48fd-8d46-ab52af6ab236" (UID: "0b8e659d-1cb5-48fd-8d46-ab52af6ab236"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.929360 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.933891 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0b8e659d-1cb5-48fd-8d46-ab52af6ab236" (UID: "0b8e659d-1cb5-48fd-8d46-ab52af6ab236"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:45:02 crc kubenswrapper[5081]: I1003 17:45:02.934573 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs" (OuterVolumeSpecName: "kube-api-access-phfzs") pod "0b8e659d-1cb5-48fd-8d46-ab52af6ab236" (UID: "0b8e659d-1cb5-48fd-8d46-ab52af6ab236"). InnerVolumeSpecName "kube-api-access-phfzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.031687 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phfzs\" (UniqueName: \"kubernetes.io/projected/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-kube-api-access-phfzs\") on node \"crc\" DevicePath \"\"" Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.031721 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b8e659d-1cb5-48fd-8d46-ab52af6ab236-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.388704 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" event={"ID":"0b8e659d-1cb5-48fd-8d46-ab52af6ab236","Type":"ContainerDied","Data":"7371efeeaefb176ac43d82dc14c526a582924d20693d9ee95c066a44ec0f2462"} Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.389088 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7371efeeaefb176ac43d82dc14c526a582924d20693d9ee95c066a44ec0f2462" Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.388821 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325225-wkhld" Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.905159 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5"] Oct 03 17:45:03 crc kubenswrapper[5081]: I1003 17:45:03.914757 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325180-dshw5"] Oct 03 17:45:05 crc kubenswrapper[5081]: I1003 17:45:05.850924 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fcb380a-2041-4c85-8746-9d5755da5e86" path="/var/lib/kubelet/pods/7fcb380a-2041-4c85-8746-9d5755da5e86/volumes" Oct 03 17:45:28 crc kubenswrapper[5081]: I1003 17:45:28.590592 5081 scope.go:117] "RemoveContainer" containerID="1f06df2ff0a4510184ceeb1e5cf00f3a466b8cb645f956dc10a151150e191a27" Oct 03 17:45:30 crc kubenswrapper[5081]: I1003 17:45:30.647068 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:45:30 crc kubenswrapper[5081]: I1003 17:45:30.647691 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:46:00 crc kubenswrapper[5081]: I1003 17:46:00.647987 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:46:00 crc kubenswrapper[5081]: I1003 17:46:00.648894 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:46:30 crc kubenswrapper[5081]: I1003 17:46:30.647321 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:46:30 crc kubenswrapper[5081]: I1003 17:46:30.647818 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:46:30 crc kubenswrapper[5081]: I1003 17:46:30.647867 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:46:30 crc kubenswrapper[5081]: I1003 17:46:30.648687 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:46:30 crc kubenswrapper[5081]: I1003 17:46:30.648738 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9" gracePeriod=600 Oct 03 17:46:31 crc kubenswrapper[5081]: I1003 17:46:31.353583 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9" exitCode=0 Oct 03 17:46:31 crc kubenswrapper[5081]: I1003 17:46:31.353612 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9"} Oct 03 17:46:31 crc kubenswrapper[5081]: I1003 17:46:31.353978 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"} Oct 03 17:46:31 crc kubenswrapper[5081]: I1003 17:46:31.354006 5081 scope.go:117] "RemoveContainer" containerID="b728b07e6c01c075b4af90bb5544d3d58ba9c5d4bd9515f40b8a4ad1d317ac75" Oct 03 17:47:48 crc kubenswrapper[5081]: I1003 17:47:48.214743 5081 generic.go:334] "Generic (PLEG): container finished" podID="f85372bd-deb7-4f5d-b631-1b26524ca9ff" containerID="927104332d39adbe4f30c11d4ffc5aaa91a1da4a749db9e3a8265014f37a0f4d" exitCode=0 Oct 03 17:47:48 crc kubenswrapper[5081]: I1003 17:47:48.214829 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" event={"ID":"f85372bd-deb7-4f5d-b631-1b26524ca9ff","Type":"ContainerDied","Data":"927104332d39adbe4f30c11d4ffc5aaa91a1da4a749db9e3a8265014f37a0f4d"} Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.803792 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.960934 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961061 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961119 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961176 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961299 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961327 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961504 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961533 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961614 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-q94z4\" (UniqueName: \"kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.961672 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key\") pod \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\" (UID: \"f85372bd-deb7-4f5d-b631-1b26524ca9ff\") " Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.969750 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4" (OuterVolumeSpecName: "kube-api-access-q94z4") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "kube-api-access-q94z4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.977750 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph" (OuterVolumeSpecName: "ceph") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.981880 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.994282 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:49 crc kubenswrapper[5081]: I1003 17:47:49.998092 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.001996 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-cells-global-config-1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.002206 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.011344 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.019366 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory" (OuterVolumeSpecName: "inventory") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.023268 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.025485 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f85372bd-deb7-4f5d-b631-1b26524ca9ff" (UID: "f85372bd-deb7-4f5d-b631-1b26524ca9ff"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064672 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064732 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064760 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q94z4\" (UniqueName: \"kubernetes.io/projected/f85372bd-deb7-4f5d-b631-1b26524ca9ff-kube-api-access-q94z4\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064779 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064798 5081 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064818 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064840 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064859 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064877 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064900 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.064922 5081 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f85372bd-deb7-4f5d-b631-1b26524ca9ff-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.270886 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" event={"ID":"f85372bd-deb7-4f5d-b631-1b26524ca9ff","Type":"ContainerDied","Data":"671fab856405f35811527448145df1cb5b3542e33b483366c13a7be38ddaf6f3"} Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.270965 5081 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-rwk8c" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.270981 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="671fab856405f35811527448145df1cb5b3542e33b483366c13a7be38ddaf6f3" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.348061 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kvb6x"] Oct 03 17:47:50 crc kubenswrapper[5081]: E1003 17:47:50.348640 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8e659d-1cb5-48fd-8d46-ab52af6ab236" containerName="collect-profiles" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.348668 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8e659d-1cb5-48fd-8d46-ab52af6ab236" containerName="collect-profiles" Oct 03 17:47:50 crc kubenswrapper[5081]: E1003 17:47:50.348689 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85372bd-deb7-4f5d-b631-1b26524ca9ff" containerName="nova-cell1-openstack-openstack-cell1" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.348698 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85372bd-deb7-4f5d-b631-1b26524ca9ff" containerName="nova-cell1-openstack-openstack-cell1" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.348959 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85372bd-deb7-4f5d-b631-1b26524ca9ff" containerName="nova-cell1-openstack-openstack-cell1" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.348989 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b8e659d-1cb5-48fd-8d46-ab52af6ab236" containerName="collect-profiles" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.349984 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.353381 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.353388 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.353545 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.353804 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.353982 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379162 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379274 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379308 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379395 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379512 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j6z4\" (UniqueName: \"kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379693 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle\") pod 
\"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379846 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.379909 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.383495 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kvb6x"] Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.486196 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.486421 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.486471 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.486533 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.487394 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.487473 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j6z4\" (UniqueName: 
\"kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.487585 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.487723 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.492201 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.495756 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.495825 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.495970 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.496027 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.496928 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") 
" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.505385 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.510316 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j6z4\" (UniqueName: \"kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4\") pod \"telemetry-openstack-openstack-cell1-kvb6x\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") " pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:50 crc kubenswrapper[5081]: I1003 17:47:50.680037 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" Oct 03 17:47:51 crc kubenswrapper[5081]: I1003 17:47:51.223385 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kvb6x"] Oct 03 17:47:51 crc kubenswrapper[5081]: W1003 17:47:51.225588 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f4a9c1c_622d_4712_9545_2ba16f2dd133.slice/crio-3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944 WatchSource:0}: Error finding container 3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944: Status 404 returned error can't find the container with id 3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944 Oct 03 17:47:51 crc kubenswrapper[5081]: I1003 17:47:51.282003 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" event={"ID":"0f4a9c1c-622d-4712-9545-2ba16f2dd133","Type":"ContainerStarted","Data":"3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944"} Oct 03 17:47:53 crc kubenswrapper[5081]: I1003 17:47:53.299281 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" event={"ID":"0f4a9c1c-622d-4712-9545-2ba16f2dd133","Type":"ContainerStarted","Data":"e0bf263b526f7cf7c01d78b4abd1d03c5e4d2ea3b032e4ec756c11873ab0a0ed"} Oct 03 17:47:53 crc kubenswrapper[5081]: I1003 17:47:53.327929 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" podStartSLOduration=2.432763691 podStartE2EDuration="3.327907573s" podCreationTimestamp="2025-10-03 17:47:50 +0000 UTC" firstStartedPulling="2025-10-03 17:47:51.227742524 +0000 UTC m=+8390.193299137" lastFinishedPulling="2025-10-03 17:47:52.122886406 +0000 UTC m=+8391.088443019" observedRunningTime="2025-10-03 17:47:53.31663091 +0000 UTC m=+8392.282187543" watchObservedRunningTime="2025-10-03 17:47:53.327907573 +0000 UTC m=+8392.293464196" Oct 03 17:49:00 crc kubenswrapper[5081]: I1003 17:49:00.647750 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:49:00 crc kubenswrapper[5081]: I1003 17:49:00.648487 5081 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:49:30 crc kubenswrapper[5081]: I1003 17:49:30.648354 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:49:30 crc kubenswrapper[5081]: I1003 17:49:30.649026 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.271059 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.282005 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.289009 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.463821 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6tsb\" (UniqueName: \"kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.463915 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.463948 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.565838 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.566199 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " 
pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.566399 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6tsb\" (UniqueName: \"kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.567318 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.567613 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.592435 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6tsb\" (UniqueName: \"kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb\") pod \"community-operators-tn7p8\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:57 crc kubenswrapper[5081]: I1003 17:49:57.617747 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:49:58 crc kubenswrapper[5081]: I1003 17:49:58.122471 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:49:58 crc kubenswrapper[5081]: I1003 17:49:58.694979 5081 generic.go:334] "Generic (PLEG): container finished" podID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerID="0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee" exitCode=0 Oct 03 17:49:58 crc kubenswrapper[5081]: I1003 17:49:58.695189 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerDied","Data":"0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee"} Oct 03 17:49:58 crc kubenswrapper[5081]: I1003 17:49:58.695378 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerStarted","Data":"47d6a4a76a47c8a44e855bfa1302bdf1c455b250f9fd19dcbbd49f3e2ab73767"} Oct 03 17:49:58 crc kubenswrapper[5081]: I1003 17:49:58.697622 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 17:49:59 crc kubenswrapper[5081]: I1003 17:49:59.711451 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerStarted","Data":"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca"} Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.647751 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.648212 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.648278 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.649389 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.649488 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" gracePeriod=600 Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.724953 5081 generic.go:334] "Generic (PLEG): container finished" podID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerID="f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca" exitCode=0 Oct 03 17:50:00 crc kubenswrapper[5081]: I1003 17:50:00.724994 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerDied","Data":"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca"} Oct 03 17:50:00 crc kubenswrapper[5081]: E1003 17:50:00.770888 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.740294 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" exitCode=0 Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.740338 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"} Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.740801 5081 scope.go:117] "RemoveContainer" containerID="a788edfbd2ea543eb0e0cc7f5af50f5657ccf2b2f7c993612d0aeb19f1ad91a9" Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.741538 5081 scope.go:117] 
"RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:50:01 crc kubenswrapper[5081]: E1003 17:50:01.741842 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.743900 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerStarted","Data":"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264"} Oct 03 17:50:01 crc kubenswrapper[5081]: I1003 17:50:01.796323 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tn7p8" podStartSLOduration=2.144124753 podStartE2EDuration="4.796301888s" podCreationTimestamp="2025-10-03 17:49:57 +0000 UTC" firstStartedPulling="2025-10-03 17:49:58.697377231 +0000 UTC m=+8517.662933844" lastFinishedPulling="2025-10-03 17:50:01.349554366 +0000 UTC m=+8520.315110979" observedRunningTime="2025-10-03 17:50:01.78589757 +0000 UTC m=+8520.751454223" watchObservedRunningTime="2025-10-03 17:50:01.796301888 +0000 UTC m=+8520.761858501" Oct 03 17:50:07 crc kubenswrapper[5081]: I1003 17:50:07.618289 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:07 crc kubenswrapper[5081]: I1003 17:50:07.619207 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:07 crc kubenswrapper[5081]: I1003 17:50:07.718863 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:07 crc kubenswrapper[5081]: I1003 17:50:07.857529 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:07 crc kubenswrapper[5081]: I1003 17:50:07.960191 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:50:09 crc kubenswrapper[5081]: I1003 17:50:09.827405 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tn7p8" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="registry-server" containerID="cri-o://9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264" gracePeriod=2 Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.395574 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.501520 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6tsb\" (UniqueName: \"kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb\") pod \"4930e77a-257b-44db-96df-c6c9ec54ec2a\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.501626 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities\") pod \"4930e77a-257b-44db-96df-c6c9ec54ec2a\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.502541 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content\") pod \"4930e77a-257b-44db-96df-c6c9ec54ec2a\" (UID: \"4930e77a-257b-44db-96df-c6c9ec54ec2a\") " Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.505547 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities" (OuterVolumeSpecName: "utilities") pod "4930e77a-257b-44db-96df-c6c9ec54ec2a" (UID: "4930e77a-257b-44db-96df-c6c9ec54ec2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.517489 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.547093 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb" (OuterVolumeSpecName: "kube-api-access-g6tsb") pod "4930e77a-257b-44db-96df-c6c9ec54ec2a" (UID: "4930e77a-257b-44db-96df-c6c9ec54ec2a"). InnerVolumeSpecName "kube-api-access-g6tsb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.620351 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6tsb\" (UniqueName: \"kubernetes.io/projected/4930e77a-257b-44db-96df-c6c9ec54ec2a-kube-api-access-g6tsb\") on node \"crc\" DevicePath \"\"" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.844226 5081 generic.go:334] "Generic (PLEG): container finished" podID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerID="9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264" exitCode=0 Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.844341 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerDied","Data":"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264"} Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.844375 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tn7p8" event={"ID":"4930e77a-257b-44db-96df-c6c9ec54ec2a","Type":"ContainerDied","Data":"47d6a4a76a47c8a44e855bfa1302bdf1c455b250f9fd19dcbbd49f3e2ab73767"} Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.844405 5081 scope.go:117] "RemoveContainer" containerID="9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.844813 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tn7p8" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.875745 5081 scope.go:117] "RemoveContainer" containerID="f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.908008 5081 scope.go:117] "RemoveContainer" containerID="0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.946378 5081 scope.go:117] "RemoveContainer" containerID="9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264" Oct 03 17:50:10 crc kubenswrapper[5081]: E1003 17:50:10.946866 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264\": container with ID starting with 9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264 not found: ID does not exist" containerID="9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.946923 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264"} err="failed to get container status \"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264\": rpc error: code = NotFound desc = could not find container \"9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264\": container with ID starting with 9a8b9c8f43663843b2dc4092700ed1a3d0549a5a3eacfcca7b7a8cc351caf264 not found: ID does not exist" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.946952 5081 scope.go:117] "RemoveContainer" containerID="f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca" Oct 03 17:50:10 crc kubenswrapper[5081]: E1003 17:50:10.947634 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca\": container with ID starting with f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca not found: ID does not exist" containerID="f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.947675 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca"} err="failed to get container status \"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca\": rpc error: code = NotFound desc = could not find container \"f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca\": container with ID starting with f03c3d42fb5f0f23a861798b366d103b0f2cc5430c077c1c0cc2fa31ce91e4ca not found: ID does not exist" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.947700 5081 scope.go:117] "RemoveContainer" containerID="0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee" Oct 03 17:50:10 crc kubenswrapper[5081]: E1003 17:50:10.948006 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee\": container with ID starting with 0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee not found: ID does not exist" containerID="0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee" Oct 03 17:50:10 crc kubenswrapper[5081]: I1003 17:50:10.948026 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee"} err="failed to get container status \"0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee\": rpc error: code = NotFound desc = could not find container \"0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee\": container with ID starting with 0534ad171076475c4dc18656afef861888abcd07cb86a7504ba18ca47cc235ee not found: ID does not exist" Oct 03 17:50:11 crc kubenswrapper[5081]: I1003 17:50:11.285275 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4930e77a-257b-44db-96df-c6c9ec54ec2a" (UID: "4930e77a-257b-44db-96df-c6c9ec54ec2a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:50:11 crc kubenswrapper[5081]: I1003 17:50:11.337257 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4930e77a-257b-44db-96df-c6c9ec54ec2a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 17:50:11 crc kubenswrapper[5081]: I1003 17:50:11.495182 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:50:11 crc kubenswrapper[5081]: I1003 17:50:11.526252 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tn7p8"] Oct 03 17:50:11 crc kubenswrapper[5081]: I1003 17:50:11.865605 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" path="/var/lib/kubelet/pods/4930e77a-257b-44db-96df-c6c9ec54ec2a/volumes" Oct 03 17:50:16 crc kubenswrapper[5081]: I1003 17:50:16.827510 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:50:16 crc kubenswrapper[5081]: E1003 17:50:16.828543 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:50:29 crc kubenswrapper[5081]: I1003 17:50:29.828177 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:50:29 crc kubenswrapper[5081]: E1003 17:50:29.829074 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.221782 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lx452"] Oct 03 17:50:30 crc kubenswrapper[5081]: E1003 17:50:30.222210 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="registry-server" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.222227 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="registry-server" Oct 03 17:50:30 crc kubenswrapper[5081]: E1003 17:50:30.222244 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="extract-utilities" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.222252 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="extract-utilities" Oct 03 17:50:30 crc kubenswrapper[5081]: E1003 17:50:30.222291 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="extract-content" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.222297 5081 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="extract-content" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.222527 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="4930e77a-257b-44db-96df-c6c9ec54ec2a" containerName="registry-server" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.224738 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.238735 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lx452"] Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.321333 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.321384 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4dpb\" (UniqueName: \"kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.321498 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.422769 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.423111 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.423140 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4dpb\" (UniqueName: \"kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.424038 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.424252 5081 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.449991 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4dpb\" (UniqueName: \"kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb\") pod \"certified-operators-lx452\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:30 crc kubenswrapper[5081]: I1003 17:50:30.547303 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:31 crc kubenswrapper[5081]: I1003 17:50:31.084014 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lx452"] Oct 03 17:50:32 crc kubenswrapper[5081]: I1003 17:50:32.098720 5081 generic.go:334] "Generic (PLEG): container finished" podID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerID="9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85" exitCode=0 Oct 03 17:50:32 crc kubenswrapper[5081]: I1003 17:50:32.098770 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerDied","Data":"9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85"} Oct 03 17:50:32 crc kubenswrapper[5081]: I1003 17:50:32.099245 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerStarted","Data":"abe2b7bea411f74123d439f131c9b57b5b25711c3cb6b95ee5a7f4a0099b6055"} Oct 03 17:50:33 crc kubenswrapper[5081]: I1003 17:50:33.111629 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerStarted","Data":"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"} Oct 03 17:50:34 crc kubenswrapper[5081]: I1003 17:50:34.122880 5081 generic.go:334] "Generic (PLEG): container finished" podID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerID="45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440" exitCode=0 Oct 03 17:50:34 crc kubenswrapper[5081]: I1003 17:50:34.123080 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerDied","Data":"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"} Oct 03 17:50:35 crc kubenswrapper[5081]: I1003 17:50:35.135336 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerStarted","Data":"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"} Oct 03 17:50:35 crc kubenswrapper[5081]: I1003 17:50:35.158372 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lx452" podStartSLOduration=2.677243704 podStartE2EDuration="5.158352697s" podCreationTimestamp="2025-10-03 17:50:30 +0000 UTC" 
firstStartedPulling="2025-10-03 17:50:32.100964093 +0000 UTC m=+8551.066520706" lastFinishedPulling="2025-10-03 17:50:34.582073086 +0000 UTC m=+8553.547629699" observedRunningTime="2025-10-03 17:50:35.150816791 +0000 UTC m=+8554.116373394" watchObservedRunningTime="2025-10-03 17:50:35.158352697 +0000 UTC m=+8554.123909310" Oct 03 17:50:40 crc kubenswrapper[5081]: I1003 17:50:40.547666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:40 crc kubenswrapper[5081]: I1003 17:50:40.548255 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:40 crc kubenswrapper[5081]: I1003 17:50:40.614896 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:41 crc kubenswrapper[5081]: I1003 17:50:41.255363 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:41 crc kubenswrapper[5081]: I1003 17:50:41.305459 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lx452"] Oct 03 17:50:41 crc kubenswrapper[5081]: I1003 17:50:41.837944 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:50:41 crc kubenswrapper[5081]: E1003 17:50:41.838836 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.224701 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lx452" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="registry-server" containerID="cri-o://bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f" gracePeriod=2 Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.768471 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lx452" Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.961016 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4dpb\" (UniqueName: \"kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb\") pod \"8ce61f6b-b76a-4532-9a3b-834615b9be92\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.961408 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content\") pod \"8ce61f6b-b76a-4532-9a3b-834615b9be92\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.961450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities\") pod \"8ce61f6b-b76a-4532-9a3b-834615b9be92\" (UID: \"8ce61f6b-b76a-4532-9a3b-834615b9be92\") " Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.963256 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities" (OuterVolumeSpecName: "utilities") pod "8ce61f6b-b76a-4532-9a3b-834615b9be92" (UID: "8ce61f6b-b76a-4532-9a3b-834615b9be92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 17:50:43 crc kubenswrapper[5081]: I1003 17:50:43.968897 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb" (OuterVolumeSpecName: "kube-api-access-x4dpb") pod "8ce61f6b-b76a-4532-9a3b-834615b9be92" (UID: "8ce61f6b-b76a-4532-9a3b-834615b9be92"). InnerVolumeSpecName "kube-api-access-x4dpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.008767 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ce61f6b-b76a-4532-9a3b-834615b9be92" (UID: "8ce61f6b-b76a-4532-9a3b-834615b9be92"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.064034 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4dpb\" (UniqueName: \"kubernetes.io/projected/8ce61f6b-b76a-4532-9a3b-834615b9be92-kube-api-access-x4dpb\") on node \"crc\" DevicePath \"\""
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.064072 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.064083 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ce61f6b-b76a-4532-9a3b-834615b9be92-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.253331 5081 generic.go:334] "Generic (PLEG): container finished" podID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerID="bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f" exitCode=0
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.253399 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerDied","Data":"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"}
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.253427 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lx452" event={"ID":"8ce61f6b-b76a-4532-9a3b-834615b9be92","Type":"ContainerDied","Data":"abe2b7bea411f74123d439f131c9b57b5b25711c3cb6b95ee5a7f4a0099b6055"}
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.253443 5081 scope.go:117] "RemoveContainer" containerID="bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.253614 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lx452"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.278800 5081 scope.go:117] "RemoveContainer" containerID="45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.289828 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lx452"]
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.300584 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lx452"]
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.322187 5081 scope.go:117] "RemoveContainer" containerID="9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.361968 5081 scope.go:117] "RemoveContainer" containerID="bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"
Oct 03 17:50:44 crc kubenswrapper[5081]: E1003 17:50:44.362821 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f\": container with ID starting with bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f not found: ID does not exist" containerID="bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.362848 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f"} err="failed to get container status \"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f\": rpc error: code = NotFound desc = could not find container \"bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f\": container with ID starting with bb967bc15a5e7f6c0221815d5fb22bae70466e473efeea763b8ef465df4a827f not found: ID does not exist"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.362869 5081 scope.go:117] "RemoveContainer" containerID="45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"
Oct 03 17:50:44 crc kubenswrapper[5081]: E1003 17:50:44.363183 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440\": container with ID starting with 45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440 not found: ID does not exist" containerID="45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.363220 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440"} err="failed to get container status \"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440\": rpc error: code = NotFound desc = could not find container \"45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440\": container with ID starting with 45f24c55251daebf53f2d85989a70ff2f34401a5d9cbce982f448416a5c23440 not found: ID does not exist"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.363244 5081 scope.go:117] "RemoveContainer" containerID="9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85"
Oct 03 17:50:44 crc kubenswrapper[5081]: E1003 17:50:44.363712 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85\": container with ID starting with 9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85 not found: ID does not exist" containerID="9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85"
Oct 03 17:50:44 crc kubenswrapper[5081]: I1003 17:50:44.363733 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85"} err="failed to get container status \"9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85\": rpc error: code = NotFound desc = could not find container \"9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85\": container with ID starting with 9deec4a91074d4d1cc6f672046ca66759701f3b4bedcf981b02eaa98436b1a85 not found: ID does not exist"
Oct 03 17:50:45 crc kubenswrapper[5081]: I1003 17:50:45.841248 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" path="/var/lib/kubelet/pods/8ce61f6b-b76a-4532-9a3b-834615b9be92/volumes"
Oct 03 17:50:53 crc kubenswrapper[5081]: I1003 17:50:53.828750 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:50:53 crc kubenswrapper[5081]: E1003 17:50:53.830284 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:51:07 crc kubenswrapper[5081]: E1003 17:51:07.579276 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:51:08 crc kubenswrapper[5081]: I1003 17:51:08.829689 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:51:08 crc kubenswrapper[5081]: E1003 17:51:08.830599 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:51:17 crc kubenswrapper[5081]: E1003 17:51:17.840469 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:51:21 crc kubenswrapper[5081]: I1003 17:51:21.836061 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:51:21 crc kubenswrapper[5081]: E1003 17:51:21.837053 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:51:28 crc kubenswrapper[5081]: E1003 17:51:28.163273 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:51:34 crc kubenswrapper[5081]: I1003 17:51:34.828626 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:51:34 crc kubenswrapper[5081]: E1003 17:51:34.829948 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:51:38 crc kubenswrapper[5081]: E1003 17:51:38.486811 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:51:48 crc kubenswrapper[5081]: E1003 17:51:48.782742 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:51:48 crc kubenswrapper[5081]: I1003 17:51:48.827528 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:51:48 crc kubenswrapper[5081]: E1003 17:51:48.827816 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:51:59 crc kubenswrapper[5081]: E1003 17:51:59.062575 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 03 17:52:03 crc kubenswrapper[5081]: I1003 17:52:03.833051 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:52:03 crc kubenswrapper[5081]: E1003 17:52:03.833857 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:52:14 crc kubenswrapper[5081]: I1003 17:52:14.828477 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:52:14 crc kubenswrapper[5081]: E1003 17:52:14.829422 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:52:26 crc kubenswrapper[5081]: I1003 17:52:26.829028 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:52:26 crc kubenswrapper[5081]: E1003 17:52:26.830001 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:52:29 crc kubenswrapper[5081]: I1003 17:52:29.384446 5081 generic.go:334] "Generic (PLEG): container finished" podID="0f4a9c1c-622d-4712-9545-2ba16f2dd133" containerID="e0bf263b526f7cf7c01d78b4abd1d03c5e4d2ea3b032e4ec756c11873ab0a0ed" exitCode=0
Oct 03 17:52:29 crc kubenswrapper[5081]: I1003 17:52:29.384514 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" event={"ID":"0f4a9c1c-622d-4712-9545-2ba16f2dd133","Type":"ContainerDied","Data":"e0bf263b526f7cf7c01d78b4abd1d03c5e4d2ea3b032e4ec756c11873ab0a0ed"}
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.823931 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x"
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.948953 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949061 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j6z4\" (UniqueName: \"kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949104 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949160 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949212 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949936 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.949976 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.950006 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory\") pod \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\" (UID: \"0f4a9c1c-622d-4712-9545-2ba16f2dd133\") "
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.958768 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.958983 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4" (OuterVolumeSpecName: "kube-api-access-6j6z4") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "kube-api-access-6j6z4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:52:30 crc kubenswrapper[5081]: I1003 17:52:30.992762 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph" (OuterVolumeSpecName: "ceph") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.042359 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.055086 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.055118 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j6z4\" (UniqueName: \"kubernetes.io/projected/0f4a9c1c-622d-4712-9545-2ba16f2dd133-kube-api-access-6j6z4\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.055133 5081 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.055144 5081 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.073731 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.101729 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.102753 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory" (OuterVolumeSpecName: "inventory") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.109843 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "0f4a9c1c-622d-4712-9545-2ba16f2dd133" (UID: "0f4a9c1c-622d-4712-9545-2ba16f2dd133"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.157326 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.157361 5081 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.157373 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.157385 5081 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/0f4a9c1c-622d-4712-9545-2ba16f2dd133-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.406424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x" event={"ID":"0f4a9c1c-622d-4712-9545-2ba16f2dd133","Type":"ContainerDied","Data":"3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944"}
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.406470 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kvb6x"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.406474 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fe89e3ef5667a9fbc7a46f589679a41e980cca728d2fb44cf6d4a22fb2af944"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.513420 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"]
Oct 03 17:52:31 crc kubenswrapper[5081]: E1003 17:52:31.513858 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4a9c1c-622d-4712-9545-2ba16f2dd133" containerName="telemetry-openstack-openstack-cell1"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.513874 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4a9c1c-622d-4712-9545-2ba16f2dd133" containerName="telemetry-openstack-openstack-cell1"
Oct 03 17:52:31 crc kubenswrapper[5081]: E1003 17:52:31.513914 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="extract-content"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.513920 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="extract-content"
Oct 03 17:52:31 crc kubenswrapper[5081]: E1003 17:52:31.513936 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="registry-server"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.513943 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="registry-server"
Oct 03 17:52:31 crc kubenswrapper[5081]: E1003 17:52:31.513951 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="extract-utilities"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.513959 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="extract-utilities"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.514134 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ce61f6b-b76a-4532-9a3b-834615b9be92" containerName="registry-server"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.514167 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f4a9c1c-622d-4712-9545-2ba16f2dd133" containerName="telemetry-openstack-openstack-cell1"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.514854 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.517882 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.518188 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.520454 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.520622 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.522810 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.533286 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"]
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670631 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670729 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670768 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcdfg\" (UniqueName: \"kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670799 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670833 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.670905 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.772527 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.773502 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.773758 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.774190 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.775027 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcdfg\" (UniqueName: \"kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.775194 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.777441 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.777775 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.778366 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.779680 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.782883 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.806917 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcdfg\" (UniqueName: \"kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg\") pod \"neutron-sriov-openstack-openstack-cell1-dnmvd\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:31 crc kubenswrapper[5081]: I1003 17:52:31.838551 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:52:32 crc kubenswrapper[5081]: I1003 17:52:32.439201 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"]
Oct 03 17:52:33 crc kubenswrapper[5081]: I1003 17:52:33.435923 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd" event={"ID":"8c4c91ae-3176-4f71-862a-e818d8d7f212","Type":"ContainerStarted","Data":"37b0f0b82c48a1602b179200fc356c187808999ee76c79c2c1f79b7558c6a4f7"}
Oct 03 17:52:33 crc kubenswrapper[5081]: I1003 17:52:33.436688 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd" event={"ID":"8c4c91ae-3176-4f71-862a-e818d8d7f212","Type":"ContainerStarted","Data":"200660c2cca5c9e4f5e6c7b7ca390af2393a572a76c6f779bf491d0873130cae"}
Oct 03 17:52:33 crc kubenswrapper[5081]: I1003 17:52:33.465734 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd" podStartSLOduration=2.025047982 podStartE2EDuration="2.465715389s" podCreationTimestamp="2025-10-03 17:52:31 +0000 UTC" firstStartedPulling="2025-10-03 17:52:32.444934461 +0000 UTC m=+8671.410491074" lastFinishedPulling="2025-10-03 17:52:32.885601828 +0000 UTC m=+8671.851158481" observedRunningTime="2025-10-03 17:52:33.452655564 +0000 UTC m=+8672.418212187" watchObservedRunningTime="2025-10-03 17:52:33.465715389 +0000 UTC m=+8672.431272002"
Oct 03 17:52:38 crc kubenswrapper[5081]: I1003 17:52:38.828037 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:52:38 crc kubenswrapper[5081]: E1003 17:52:38.828827 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.871801 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.875358 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.889123 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.969963 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7jc7\" (UniqueName: \"kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.970014 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:44 crc kubenswrapper[5081]: I1003 17:52:44.970055 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.075120 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7jc7\" (UniqueName: \"kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.075175 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.075222 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.075830 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.075830 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.101711 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7jc7\" (UniqueName: \"kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7\") pod \"redhat-marketplace-mdpsz\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") " pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.211549 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:45 crc kubenswrapper[5081]: I1003 17:52:45.742101 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:45 crc kubenswrapper[5081]: W1003 17:52:45.745704 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3137283_f74c_4858_97a0_9eef1cc3de4f.slice/crio-72f3f15269fa4f27c1af99b37c88a0628aafe5a50cb8e745884a09713c08ff3d WatchSource:0}: Error finding container 72f3f15269fa4f27c1af99b37c88a0628aafe5a50cb8e745884a09713c08ff3d: Status 404 returned error can't find the container with id 72f3f15269fa4f27c1af99b37c88a0628aafe5a50cb8e745884a09713c08ff3d
Oct 03 17:52:46 crc kubenswrapper[5081]: I1003 17:52:46.597744 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerID="1ffcef04404598e2d0d01ab42294c45401aab2272d6f50a808dcede35f6f81dd" exitCode=0
Oct 03 17:52:46 crc kubenswrapper[5081]: I1003 17:52:46.597889 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerDied","Data":"1ffcef04404598e2d0d01ab42294c45401aab2272d6f50a808dcede35f6f81dd"}
Oct 03 17:52:46 crc kubenswrapper[5081]: I1003 17:52:46.598098 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerStarted","Data":"72f3f15269fa4f27c1af99b37c88a0628aafe5a50cb8e745884a09713c08ff3d"}
Oct 03 17:52:47 crc kubenswrapper[5081]: I1003 17:52:47.612966 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerStarted","Data":"b13fce585954a9f620b391abadab31b4eec30eaf53d087f228848e6070be8040"}
Oct 03 17:52:48 crc kubenswrapper[5081]: I1003 17:52:48.624062 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerID="b13fce585954a9f620b391abadab31b4eec30eaf53d087f228848e6070be8040" exitCode=0
Oct 03 17:52:48 crc kubenswrapper[5081]: I1003 17:52:48.624161 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerDied","Data":"b13fce585954a9f620b391abadab31b4eec30eaf53d087f228848e6070be8040"}
Oct 03 17:52:50 crc kubenswrapper[5081]: I1003 17:52:50.646797 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerStarted","Data":"9e16368c2b42da7aaf964f7c2a0fe43856af8cccc4af8855e4d709f35a968358"}
Oct 03 17:52:50 crc kubenswrapper[5081]: I1003 17:52:50.668049 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mdpsz" podStartSLOduration=4.198879427 podStartE2EDuration="6.668028657s" podCreationTimestamp="2025-10-03 17:52:44 +0000 UTC" firstStartedPulling="2025-10-03 17:52:46.600076098 +0000 UTC m=+8685.565632711" lastFinishedPulling="2025-10-03 17:52:49.069225318 +0000 UTC m=+8688.034781941" observedRunningTime="2025-10-03 17:52:50.663545468 +0000 UTC m=+8689.629102091" watchObservedRunningTime="2025-10-03 17:52:50.668028657 +0000 UTC m=+8689.633585290"
Oct 03 17:52:51 crc kubenswrapper[5081]: I1003 17:52:51.835658 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:52:51 crc kubenswrapper[5081]: E1003 17:52:51.836007 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:52:55 crc kubenswrapper[5081]: I1003 17:52:55.211745 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:55 crc kubenswrapper[5081]: I1003 17:52:55.212226 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:55 crc kubenswrapper[5081]: I1003 17:52:55.261192 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:55 crc kubenswrapper[5081]: I1003 17:52:55.747583 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:55 crc kubenswrapper[5081]: I1003 17:52:55.808992 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:57 crc kubenswrapper[5081]: I1003 17:52:57.717131 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mdpsz" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="registry-server" containerID="cri-o://9e16368c2b42da7aaf964f7c2a0fe43856af8cccc4af8855e4d709f35a968358" gracePeriod=2
Oct 03 17:52:58 crc kubenswrapper[5081]: I1003 17:52:58.733668 5081 generic.go:334] "Generic (PLEG): container finished" podID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerID="9e16368c2b42da7aaf964f7c2a0fe43856af8cccc4af8855e4d709f35a968358" exitCode=0
Oct 03 17:52:58 crc kubenswrapper[5081]: I1003 17:52:58.733844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerDied","Data":"9e16368c2b42da7aaf964f7c2a0fe43856af8cccc4af8855e4d709f35a968358"}
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.418760 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.469797 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities\") pod \"b3137283-f74c-4858-97a0-9eef1cc3de4f\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") "
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.470079 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7jc7\" (UniqueName: \"kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7\") pod \"b3137283-f74c-4858-97a0-9eef1cc3de4f\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") "
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.470122 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content\") pod \"b3137283-f74c-4858-97a0-9eef1cc3de4f\" (UID: \"b3137283-f74c-4858-97a0-9eef1cc3de4f\") "
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.470522 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities" (OuterVolumeSpecName: "utilities") pod "b3137283-f74c-4858-97a0-9eef1cc3de4f" (UID: "b3137283-f74c-4858-97a0-9eef1cc3de4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.470825 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.490907 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7" (OuterVolumeSpecName: "kube-api-access-d7jc7") pod "b3137283-f74c-4858-97a0-9eef1cc3de4f" (UID: "b3137283-f74c-4858-97a0-9eef1cc3de4f"). InnerVolumeSpecName "kube-api-access-d7jc7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.507476 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3137283-f74c-4858-97a0-9eef1cc3de4f" (UID: "b3137283-f74c-4858-97a0-9eef1cc3de4f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.573358 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7jc7\" (UniqueName: \"kubernetes.io/projected/b3137283-f74c-4858-97a0-9eef1cc3de4f-kube-api-access-d7jc7\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.573395 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3137283-f74c-4858-97a0-9eef1cc3de4f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.770256 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdpsz" event={"ID":"b3137283-f74c-4858-97a0-9eef1cc3de4f","Type":"ContainerDied","Data":"72f3f15269fa4f27c1af99b37c88a0628aafe5a50cb8e745884a09713c08ff3d"}
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.771239 5081 scope.go:117] "RemoveContainer" containerID="9e16368c2b42da7aaf964f7c2a0fe43856af8cccc4af8855e4d709f35a968358"
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.770381 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdpsz"
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.819523 5081 scope.go:117] "RemoveContainer" containerID="b13fce585954a9f620b391abadab31b4eec30eaf53d087f228848e6070be8040"
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.821790 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.841277 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdpsz"]
Oct 03 17:52:59 crc kubenswrapper[5081]: I1003 17:52:59.843056 5081 scope.go:117] "RemoveContainer" containerID="1ffcef04404598e2d0d01ab42294c45401aab2272d6f50a808dcede35f6f81dd"
Oct 03 17:53:01 crc kubenswrapper[5081]: I1003 17:53:01.850997 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" path="/var/lib/kubelet/pods/b3137283-f74c-4858-97a0-9eef1cc3de4f/volumes"
Oct 03 17:53:02 crc kubenswrapper[5081]: I1003 17:53:02.828217 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:53:02 crc kubenswrapper[5081]: E1003 17:53:02.828759 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:53:15 crc kubenswrapper[5081]: I1003 17:53:15.828878 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:53:15 crc kubenswrapper[5081]: E1003 17:53:15.829954 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:53:29 crc kubenswrapper[5081]: I1003 17:53:29.830959 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:53:29 crc kubenswrapper[5081]: E1003 17:53:29.832548 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:53:43 crc kubenswrapper[5081]: I1003 17:53:43.828215 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:53:43 crc kubenswrapper[5081]: E1003 17:53:43.829413 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:53:58 crc kubenswrapper[5081]: I1003 17:53:58.828726 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:53:58 crc kubenswrapper[5081]: E1003 17:53:58.829487 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:54:09 crc kubenswrapper[5081]: I1003 17:54:09.828017 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:54:09 crc kubenswrapper[5081]: E1003 17:54:09.828909 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:54:22 crc kubenswrapper[5081]: I1003 17:54:22.828230 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:54:22 crc kubenswrapper[5081]: E1003 17:54:22.829126 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:54:36 crc kubenswrapper[5081]: I1003 17:54:36.827875 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:54:36 crc kubenswrapper[5081]: E1003 17:54:36.829045 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 17:54:40 crc kubenswrapper[5081]: I1003 17:54:40.956296 5081 generic.go:334] "Generic (PLEG): container finished" podID="8c4c91ae-3176-4f71-862a-e818d8d7f212" containerID="37b0f0b82c48a1602b179200fc356c187808999ee76c79c2c1f79b7558c6a4f7" exitCode=0
Oct 03 17:54:40 crc kubenswrapper[5081]: I1003 17:54:40.956656 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd" event={"ID":"8c4c91ae-3176-4f71-862a-e818d8d7f212","Type":"ContainerDied","Data":"37b0f0b82c48a1602b179200fc356c187808999ee76c79c2c1f79b7558c6a4f7"}
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.523751 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.649881 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.649964 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcdfg\" (UniqueName: \"kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.650073 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.650103 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.650227 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.650279 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle\") pod \"8c4c91ae-3176-4f71-862a-e818d8d7f212\" (UID: \"8c4c91ae-3176-4f71-862a-e818d8d7f212\") "
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.655433 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.658929 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg" (OuterVolumeSpecName: "kube-api-access-xcdfg") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "kube-api-access-xcdfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.659726 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph" (OuterVolumeSpecName: "ceph") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.678301 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory" (OuterVolumeSpecName: "inventory") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.678339 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.699646 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8c4c91ae-3176-4f71-862a-e818d8d7f212" (UID: "8c4c91ae-3176-4f71-862a-e818d8d7f212"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.752947 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.753083 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.753161 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcdfg\" (UniqueName: \"kubernetes.io/projected/8c4c91ae-3176-4f71-862a-e818d8d7f212-kube-api-access-xcdfg\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.753248 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.753324 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.753402 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c4c91ae-3176-4f71-862a-e818d8d7f212-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.977210 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd" event={"ID":"8c4c91ae-3176-4f71-862a-e818d8d7f212","Type":"ContainerDied","Data":"200660c2cca5c9e4f5e6c7b7ca390af2393a572a76c6f779bf491d0873130cae"}
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.977624 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="200660c2cca5c9e4f5e6c7b7ca390af2393a572a76c6f779bf491d0873130cae"
Oct 03 17:54:42 crc kubenswrapper[5081]: I1003 17:54:42.977251 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-dnmvd"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.119144 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs"]
Oct 03 17:54:43 crc kubenswrapper[5081]: E1003 17:54:43.119695 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="extract-content"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.119714 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="extract-content"
Oct 03 17:54:43 crc kubenswrapper[5081]: E1003 17:54:43.119734 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="registry-server"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.119896 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="registry-server"
Oct 03 17:54:43 crc kubenswrapper[5081]: E1003 17:54:43.119933 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="extract-utilities"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.119940 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="extract-utilities"
Oct 03 17:54:43 crc kubenswrapper[5081]: E1003 17:54:43.119948 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4c91ae-3176-4f71-862a-e818d8d7f212" containerName="neutron-sriov-openstack-openstack-cell1"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.119953 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4c91ae-3176-4f71-862a-e818d8d7f212" containerName="neutron-sriov-openstack-openstack-cell1"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.120156 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3137283-f74c-4858-97a0-9eef1cc3de4f" containerName="registry-server"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.120188 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4c91ae-3176-4f71-862a-e818d8d7f212" containerName="neutron-sriov-openstack-openstack-cell1"
Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.120992 5081 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.123282 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.123282 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.123285 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.123741 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.123974 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.133252 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs"] Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263047 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263111 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqr5w\" (UniqueName: \"kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263138 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263280 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263476 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.263626 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365646 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365713 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365754 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqr5w\" (UniqueName: \"kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365776 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365857 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.365963 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.370444 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.370447 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: 
\"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.370638 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.373166 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.373341 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.388306 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqr5w\" (UniqueName: \"kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w\") pod \"neutron-dhcp-openstack-openstack-cell1-6wdgs\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:43 crc kubenswrapper[5081]: I1003 17:54:43.437249 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:54:44 crc kubenswrapper[5081]: I1003 17:54:44.028533 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs"] Oct 03 17:54:44 crc kubenswrapper[5081]: W1003 17:54:44.031902 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacec17fb_26c9_474e_a337_31044887b6fe.slice/crio-60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2 WatchSource:0}: Error finding container 60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2: Status 404 returned error can't find the container with id 60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2 Oct 03 17:54:44 crc kubenswrapper[5081]: I1003 17:54:44.996476 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" event={"ID":"acec17fb-26c9-474e-a337-31044887b6fe","Type":"ContainerStarted","Data":"18d00b83bc67ad703268050839415a1ffda8413ee166cdaefa29c76d1a74a03a"} Oct 03 17:54:44 crc kubenswrapper[5081]: I1003 17:54:44.996770 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" event={"ID":"acec17fb-26c9-474e-a337-31044887b6fe","Type":"ContainerStarted","Data":"60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2"} Oct 03 17:54:51 crc kubenswrapper[5081]: I1003 17:54:51.836890 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:54:51 crc kubenswrapper[5081]: E1003 17:54:51.839199 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 17:55:04 crc kubenswrapper[5081]: I1003 17:55:04.828765 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a" Oct 03 17:55:05 crc kubenswrapper[5081]: I1003 17:55:05.191031 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54"} Oct 03 17:55:05 crc kubenswrapper[5081]: I1003 17:55:05.217582 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" podStartSLOduration=21.67170086 podStartE2EDuration="22.217551328s" podCreationTimestamp="2025-10-03 17:54:43 +0000 UTC" firstStartedPulling="2025-10-03 17:54:44.034123273 +0000 UTC m=+8802.999679886" lastFinishedPulling="2025-10-03 17:54:44.579973711 +0000 UTC m=+8803.545530354" observedRunningTime="2025-10-03 17:54:45.022383178 +0000 UTC m=+8803.987939791" watchObservedRunningTime="2025-10-03 17:55:05.217551328 +0000 UTC m=+8824.183107941" Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.568526 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"] Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.573113 5081 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.584292 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"]
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.713283 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.713540 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.713967 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9zgm\" (UniqueName: \"kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.815590 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.815899 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.816123 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.816212 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:53 crc kubenswrapper[5081]: I1003 17:55:53.816650 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9zgm\" (UniqueName: \"kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:54 crc kubenswrapper[5081]: I1003 17:55:54.404472 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9zgm\" (UniqueName: \"kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm\") pod \"redhat-operators-zlp82\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") " pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:54 crc kubenswrapper[5081]: I1003 17:55:54.512188 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:55:54 crc kubenswrapper[5081]: I1003 17:55:54.987101 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"]
Oct 03 17:55:55 crc kubenswrapper[5081]: I1003 17:55:55.733003 5081 generic.go:334] "Generic (PLEG): container finished" podID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerID="912a12e54146c3339858989537f85facb68fcd787703b0333e144fa080f9fc72" exitCode=0
Oct 03 17:55:55 crc kubenswrapper[5081]: I1003 17:55:55.733330 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerDied","Data":"912a12e54146c3339858989537f85facb68fcd787703b0333e144fa080f9fc72"}
Oct 03 17:55:55 crc kubenswrapper[5081]: I1003 17:55:55.733367 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerStarted","Data":"7723ab13f57b09a2a59256d4ec7254cc826897611a0ec83aa7a8bbfcae589534"}
Oct 03 17:55:55 crc kubenswrapper[5081]: I1003 17:55:55.737054 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 17:55:56 crc kubenswrapper[5081]: I1003 17:55:56.748070 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerStarted","Data":"f2f578e434f5a37ea407f4851f7f9968332014c56ea17a0959783a79f997d7b6"}
Oct 03 17:55:57 crc kubenswrapper[5081]: I1003 17:55:57.765258 5081 generic.go:334] "Generic (PLEG): container finished" podID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerID="f2f578e434f5a37ea407f4851f7f9968332014c56ea17a0959783a79f997d7b6" exitCode=0
Oct 03 17:55:57 crc kubenswrapper[5081]: I1003 17:55:57.765366 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerDied","Data":"f2f578e434f5a37ea407f4851f7f9968332014c56ea17a0959783a79f997d7b6"}
Oct 03 17:55:58 crc kubenswrapper[5081]: I1003 17:55:58.779754 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerStarted","Data":"59d8a8711c5bd5dd06327e9e03d116b9f8959fcc0d5cf7950701c46e9443cd51"}
Oct 03 17:55:58 crc kubenswrapper[5081]: I1003 17:55:58.802748 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zlp82" podStartSLOduration=3.242370083 podStartE2EDuration="5.802724791s" podCreationTimestamp="2025-10-03 17:55:53 +0000 UTC" firstStartedPulling="2025-10-03 17:55:55.73672701 +0000 UTC m=+8874.702283633" lastFinishedPulling="2025-10-03 17:55:58.297081728 +0000 UTC m=+8877.262638341" observedRunningTime="2025-10-03 17:55:58.795098622 +0000 UTC m=+8877.760655235" watchObservedRunningTime="2025-10-03 17:55:58.802724791 +0000 UTC m=+8877.768281394"
Oct 03 17:56:04 crc kubenswrapper[5081]: I1003 17:56:04.512763 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:04 crc kubenswrapper[5081]: I1003 17:56:04.513578 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:04 crc kubenswrapper[5081]: I1003 17:56:04.578836 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:05 crc kubenswrapper[5081]: I1003 17:56:05.385674 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:05 crc kubenswrapper[5081]: I1003 17:56:05.446928 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"]
Oct 03 17:56:06 crc kubenswrapper[5081]: I1003 17:56:06.862835 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zlp82" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="registry-server" containerID="cri-o://59d8a8711c5bd5dd06327e9e03d116b9f8959fcc0d5cf7950701c46e9443cd51" gracePeriod=2
Oct 03 17:56:07 crc kubenswrapper[5081]: I1003 17:56:07.883398 5081 generic.go:334] "Generic (PLEG): container finished" podID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerID="59d8a8711c5bd5dd06327e9e03d116b9f8959fcc0d5cf7950701c46e9443cd51" exitCode=0
Oct 03 17:56:07 crc kubenswrapper[5081]: I1003 17:56:07.883510 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerDied","Data":"59d8a8711c5bd5dd06327e9e03d116b9f8959fcc0d5cf7950701c46e9443cd51"}
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.358806 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.463904 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content\") pod \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") "
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.463970 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities\") pod \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") "
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.464041 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9zgm\" (UniqueName: \"kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm\") pod \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\" (UID: \"aafb5cc8-2991-45c6-b3cf-c6405da33f2e\") "
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.466218 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities" (OuterVolumeSpecName: "utilities") pod "aafb5cc8-2991-45c6-b3cf-c6405da33f2e" (UID: "aafb5cc8-2991-45c6-b3cf-c6405da33f2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.473961 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm" (OuterVolumeSpecName: "kube-api-access-c9zgm") pod "aafb5cc8-2991-45c6-b3cf-c6405da33f2e" (UID: "aafb5cc8-2991-45c6-b3cf-c6405da33f2e"). InnerVolumeSpecName "kube-api-access-c9zgm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.566986 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9zgm\" (UniqueName: \"kubernetes.io/projected/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-kube-api-access-c9zgm\") on node \"crc\" DevicePath \"\""
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.567024 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.901663 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlp82" event={"ID":"aafb5cc8-2991-45c6-b3cf-c6405da33f2e","Type":"ContainerDied","Data":"7723ab13f57b09a2a59256d4ec7254cc826897611a0ec83aa7a8bbfcae589534"}
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.902020 5081 scope.go:117] "RemoveContainer" containerID="59d8a8711c5bd5dd06327e9e03d116b9f8959fcc0d5cf7950701c46e9443cd51"
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.901782 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlp82"
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.928688 5081 scope.go:117] "RemoveContainer" containerID="f2f578e434f5a37ea407f4851f7f9968332014c56ea17a0959783a79f997d7b6"
Oct 03 17:56:08 crc kubenswrapper[5081]: I1003 17:56:08.949169 5081 scope.go:117] "RemoveContainer" containerID="912a12e54146c3339858989537f85facb68fcd787703b0333e144fa080f9fc72"
Oct 03 17:56:09 crc kubenswrapper[5081]: I1003 17:56:09.066633 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aafb5cc8-2991-45c6-b3cf-c6405da33f2e" (UID: "aafb5cc8-2991-45c6-b3cf-c6405da33f2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 17:56:09 crc kubenswrapper[5081]: I1003 17:56:09.076623 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aafb5cc8-2991-45c6-b3cf-c6405da33f2e-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 17:56:09 crc kubenswrapper[5081]: I1003 17:56:09.258252 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"]
Oct 03 17:56:09 crc kubenswrapper[5081]: I1003 17:56:09.271699 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zlp82"]
Oct 03 17:56:09 crc kubenswrapper[5081]: I1003 17:56:09.850624 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" path="/var/lib/kubelet/pods/aafb5cc8-2991-45c6-b3cf-c6405da33f2e/volumes"
Oct 03 17:57:30 crc kubenswrapper[5081]: I1003 17:57:30.648028 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:57:30 crc kubenswrapper[5081]: I1003 17:57:30.649684 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:58:00 crc kubenswrapper[5081]: I1003 17:58:00.647284 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:58:00 crc kubenswrapper[5081]: I1003 17:58:00.647779 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:58:30 crc kubenswrapper[5081]: I1003 17:58:30.647951 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 17:58:30 crc kubenswrapper[5081]: I1003 17:58:30.648689 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 17:58:30 crc kubenswrapper[5081]: I1003 17:58:30.648750 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 17:58:30 crc kubenswrapper[5081]: I1003 17:58:30.649489 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 17:58:30 crc kubenswrapper[5081]: I1003 17:58:30.649530 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54" gracePeriod=600
Oct 03 17:58:31 crc kubenswrapper[5081]: I1003 17:58:31.572281 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54" exitCode=0
Oct 03 17:58:31 crc kubenswrapper[5081]: I1003 17:58:31.572359 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54"}
Oct 03 17:58:31 crc kubenswrapper[5081]: I1003 17:58:31.572914 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401"}
Oct 03 17:58:31 crc kubenswrapper[5081]: I1003 17:58:31.572938 5081 scope.go:117] "RemoveContainer" containerID="05c5d111b1d8ebb095a02605c4efff4f2ce9c2ed30485ff845cb49434a775c8a"
Oct 03 17:59:51 crc kubenswrapper[5081]: I1003 17:59:51.502993 5081 generic.go:334] "Generic (PLEG): container finished" podID="acec17fb-26c9-474e-a337-31044887b6fe" containerID="18d00b83bc67ad703268050839415a1ffda8413ee166cdaefa29c76d1a74a03a" exitCode=0
Oct 03 17:59:51 crc kubenswrapper[5081]: I1003 17:59:51.503205 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" event={"ID":"acec17fb-26c9-474e-a337-31044887b6fe","Type":"ContainerDied","Data":"18d00b83bc67ad703268050839415a1ffda8413ee166cdaefa29c76d1a74a03a"}
Oct 03 17:59:52 crc kubenswrapper[5081]: I1003 17:59:52.996865 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs"
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131268 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131657 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131712 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqr5w\" (UniqueName: \"kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131753 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131787 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.131873 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle\") pod \"acec17fb-26c9-474e-a337-31044887b6fe\" (UID: \"acec17fb-26c9-474e-a337-31044887b6fe\") "
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.140460 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w" (OuterVolumeSpecName: "kube-api-access-kqr5w") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "kube-api-access-kqr5w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.141872 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph" (OuterVolumeSpecName: "ceph") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.141913 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.164236 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory" (OuterVolumeSpecName: "inventory") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.169817 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.179871 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "acec17fb-26c9-474e-a337-31044887b6fe" (UID: "acec17fb-26c9-474e-a337-31044887b6fe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234513 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234550 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234564 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqr5w\" (UniqueName: \"kubernetes.io/projected/acec17fb-26c9-474e-a337-31044887b6fe-kube-api-access-kqr5w\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234585 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ceph\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234595 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.234603 5081 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acec17fb-26c9-474e-a337-31044887b6fe-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.530803 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" event={"ID":"acec17fb-26c9-474e-a337-31044887b6fe","Type":"ContainerDied","Data":"60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2"}
Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.530861 5081 pod_container_deletor.go:80] "Container not found in pod's containers"
containerID="60babbd94683fbab2a5a34304d712fe2d583dd4fecddb0b43475e95c17ca1bd2" Oct 03 17:59:53 crc kubenswrapper[5081]: I1003 17:59:53.530947 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-6wdgs" Oct 03 17:59:57 crc kubenswrapper[5081]: I1003 17:59:57.119748 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 17:59:57 crc kubenswrapper[5081]: I1003 17:59:57.120514 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" containerName="nova-cell0-conductor-conductor" containerID="cri-o://d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d" gracePeriod=30 Oct 03 17:59:57 crc kubenswrapper[5081]: I1003 17:59:57.157584 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 17:59:57 crc kubenswrapper[5081]: I1003 17:59:57.157771 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerName="nova-cell1-conductor-conductor" containerID="cri-o://4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" gracePeriod=30 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.262714 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.267446 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-log" containerID="cri-o://c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a" gracePeriod=30 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.267727 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-api" containerID="cri-o://e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734" gracePeriod=30 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.305407 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.305867 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="da03a230-eac7-4927-a20f-c680a7647aa3" containerName="nova-scheduler-scheduler" containerID="cri-o://b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7" gracePeriod=30 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.322419 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.322816 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" containerID="cri-o://8cce5a28488bbf244a3f741d2edcd6af96637dc4382513e573271312e771dce2" gracePeriod=30 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.322863 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-metadata" containerID="cri-o://3983b21ea76676c2f8ce9fb09b8ed8a3748f827017101ce24cc74bd0154b826f" gracePeriod=30 Oct 03 
17:59:58 crc kubenswrapper[5081]: E1003 17:59:58.438903 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:59:58 crc kubenswrapper[5081]: E1003 17:59:58.442047 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:59:58 crc kubenswrapper[5081]: E1003 17:59:58.445677 5081 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 03 17:59:58 crc kubenswrapper[5081]: E1003 17:59:58.445741 5081 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerName="nova-cell1-conductor-conductor" Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.586627 5081 generic.go:334] "Generic (PLEG): container finished" podID="41b108a7-949e-41e0-984d-21845d76d4eb" containerID="8cce5a28488bbf244a3f741d2edcd6af96637dc4382513e573271312e771dce2" exitCode=143 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.586765 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerDied","Data":"8cce5a28488bbf244a3f741d2edcd6af96637dc4382513e573271312e771dce2"} Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.590786 5081 generic.go:334] "Generic (PLEG): container finished" podID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerID="c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a" exitCode=143 Oct 03 17:59:58 crc kubenswrapper[5081]: I1003 17:59:58.590888 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerDied","Data":"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a"} Oct 03 17:59:59 crc kubenswrapper[5081]: I1003 17:59:59.615477 5081 generic.go:334] "Generic (PLEG): container finished" podID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerID="4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" exitCode=0 Oct 03 17:59:59 crc kubenswrapper[5081]: I1003 17:59:59.615523 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156","Type":"ContainerDied","Data":"4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37"} Oct 03 17:59:59 crc kubenswrapper[5081]: I1003 17:59:59.824428 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.022769 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hb6w\" (UniqueName: \"kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w\") pod \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.022944 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data\") pod \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.023005 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle\") pod \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\" (UID: \"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.042854 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w" (OuterVolumeSpecName: "kube-api-access-5hb6w") pod "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" (UID: "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156"). InnerVolumeSpecName "kube-api-access-5hb6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.057193 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data" (OuterVolumeSpecName: "config-data") pod "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" (UID: "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.073722 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" (UID: "b8b1902e-cdaf-43a2-b41b-b3b93ebc9156"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.128672 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hb6w\" (UniqueName: \"kubernetes.io/projected/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-kube-api-access-5hb6w\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.128721 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.128733 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153007 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb"] Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.153552 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="registry-server" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153578 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="registry-server" Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.153600 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerName="nova-cell1-conductor-conductor" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153605 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerName="nova-cell1-conductor-conductor" Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.153617 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="extract-content" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153623 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="extract-content" Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.153639 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="extract-utilities" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153645 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="extract-utilities" Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.153732 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acec17fb-26c9-474e-a337-31044887b6fe" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153739 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="acec17fb-26c9-474e-a337-31044887b6fe" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153944 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="aafb5cc8-2991-45c6-b3cf-c6405da33f2e" containerName="registry-server" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153966 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" containerName="nova-cell1-conductor-conductor" Oct 03 
18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.153980 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="acec17fb-26c9-474e-a337-31044887b6fe" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.154869 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.156939 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.158171 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.166481 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.271774 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.336116 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdxpv\" (UniqueName: \"kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.336230 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.336303 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.439292 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle\") pod \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.439642 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data\") pod \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.439674 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ptm2\" (UniqueName: \"kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2\") pod 
\"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\" (UID: \"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21\") " Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.440091 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdxpv\" (UniqueName: \"kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.440159 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.440221 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.444890 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.445187 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2" (OuterVolumeSpecName: "kube-api-access-9ptm2") pod "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" (UID: "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21"). InnerVolumeSpecName "kube-api-access-9ptm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.446706 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.462812 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdxpv\" (UniqueName: \"kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv\") pod \"collect-profiles-29325240-xmvfb\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.482936 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" (UID: "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.485607 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.488544 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data" (OuterVolumeSpecName: "config-data") pod "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" (UID: "0b0aae61-854d-4654-b9d4-c8ca8f8a3c21"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.542012 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.542052 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ptm2\" (UniqueName: \"kubernetes.io/projected/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-kube-api-access-9ptm2\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.542064 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.635898 5081 generic.go:334] "Generic (PLEG): container finished" podID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" containerID="d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d" exitCode=0 Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.635998 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.636028 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21","Type":"ContainerDied","Data":"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d"} Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.636617 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0b0aae61-854d-4654-b9d4-c8ca8f8a3c21","Type":"ContainerDied","Data":"e256609c619a0ab89506e1efb2a741e9d1143c7c4a30036097ddcaf30d245567"} Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.636674 5081 scope.go:117] "RemoveContainer" containerID="d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.652983 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b8b1902e-cdaf-43a2-b41b-b3b93ebc9156","Type":"ContainerDied","Data":"c58eec509086192daec1bfc8cbb0dd4650032f77afb675d2dec20a99b278ca38"} Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.653046 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.685539 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.700260 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.707952 5081 scope.go:117] "RemoveContainer" containerID="d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d" Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.708678 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d\": container with ID starting with d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d not found: ID does not exist" containerID="d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.708720 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d"} err="failed to get container status \"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d\": rpc error: code = NotFound desc = could not find container \"d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d\": container with ID starting with d67d6019efb52778ccb4b9b7dcb35fd66f543ae969b38c20043c5afd23258f3d not found: ID does not exist" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.708785 5081 scope.go:117] "RemoveContainer" containerID="4e2e2acc9bcf38aed76ddbf71c6a490d1848bd6a3c73c27a902b0af5e3a42c37" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.728681 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: E1003 18:00:00.729352 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" containerName="nova-cell0-conductor-conductor" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.729370 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" containerName="nova-cell0-conductor-conductor" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.729649 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" containerName="nova-cell0-conductor-conductor" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.730669 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.733213 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.739743 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.753995 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.766501 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.776781 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.778805 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.782159 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.794859 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.849597 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.849648 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp747\" (UniqueName: \"kubernetes.io/projected/311e8141-0ab3-4921-b678-528ba5e545f0-kube-api-access-cp747\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.849938 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.951397 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lxnw\" (UniqueName: \"kubernetes.io/projected/a25ab879-1c41-4b6e-920c-51903f580487-kube-api-access-6lxnw\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.951480 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.952081 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.952144 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.952206 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp747\" (UniqueName: \"kubernetes.io/projected/311e8141-0ab3-4921-b678-528ba5e545f0-kube-api-access-cp747\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.952537 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.958700 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.959193 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311e8141-0ab3-4921-b678-528ba5e545f0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:00 crc kubenswrapper[5081]: I1003 18:00:00.971103 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp747\" (UniqueName: \"kubernetes.io/projected/311e8141-0ab3-4921-b678-528ba5e545f0-kube-api-access-cp747\") pod \"nova-cell0-conductor-0\" (UID: \"311e8141-0ab3-4921-b678-528ba5e545f0\") " pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.017862 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb"] Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.054495 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.054597 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lxnw\" (UniqueName: \"kubernetes.io/projected/a25ab879-1c41-4b6e-920c-51903f580487-kube-api-access-6lxnw\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.054758 5081 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.058126 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.059984 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.066122 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25ab879-1c41-4b6e-920c-51903f580487-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.074430 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lxnw\" (UniqueName: \"kubernetes.io/projected/a25ab879-1c41-4b6e-920c-51903f580487-kube-api-access-6lxnw\") pod \"nova-cell1-conductor-0\" (UID: \"a25ab879-1c41-4b6e-920c-51903f580487\") " pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.108131 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.541947 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": dial tcp 10.217.1.83:8775: connect: connection refused" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.542066 5081 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": dial tcp 10.217.1.83:8775: connect: connection refused" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.546120 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.674758 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"311e8141-0ab3-4921-b678-528ba5e545f0","Type":"ContainerStarted","Data":"34f6e067a957b201d68d0b11831ebe770b3fbb26f934445d31c0f2ec3e640ce9"} Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.696275 5081 generic.go:334] "Generic (PLEG): container finished" podID="41b108a7-949e-41e0-984d-21845d76d4eb" containerID="3983b21ea76676c2f8ce9fb09b8ed8a3748f827017101ce24cc74bd0154b826f" exitCode=0 Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.696351 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerDied","Data":"3983b21ea76676c2f8ce9fb09b8ed8a3748f827017101ce24cc74bd0154b826f"} Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.719778 
5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" event={"ID":"722dcb0f-93f4-4a37-a8c2-75c6a301a047","Type":"ContainerStarted","Data":"db9d8c5737e51029cfbdefe0519d6ef5b469edeb96cc560163b077c6b364a479"} Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.946977 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0aae61-854d-4654-b9d4-c8ca8f8a3c21" path="/var/lib/kubelet/pods/0b0aae61-854d-4654-b9d4-c8ca8f8a3c21/volumes" Oct 03 18:00:01 crc kubenswrapper[5081]: I1003 18:00:01.948798 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8b1902e-cdaf-43a2-b41b-b3b93ebc9156" path="/var/lib/kubelet/pods/b8b1902e-cdaf-43a2-b41b-b3b93ebc9156/volumes" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.187509 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.202737 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.259629 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: W1003 18:00:02.275889 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda25ab879_1c41_4b6e_920c_51903f580487.slice/crio-4913df83f32b4d88611173b5552c98dc46eff699800ccaaa260b8bb5e3bc1d40 WatchSource:0}: Error finding container 4913df83f32b4d88611173b5552c98dc46eff699800ccaaa260b8bb5e3bc1d40: Status 404 returned error can't find the container with id 4913df83f32b4d88611173b5552c98dc46eff699800ccaaa260b8bb5e3bc1d40 Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316130 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle\") pod \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316225 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs\") pod \"41b108a7-949e-41e0-984d-21845d76d4eb\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316286 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data\") pod \"41b108a7-949e-41e0-984d-21845d76d4eb\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316454 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs\") pod \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316469 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2szx9\" (UniqueName: \"kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9\") pod \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\" (UID: 
\"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316541 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle\") pod \"41b108a7-949e-41e0-984d-21845d76d4eb\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316576 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data\") pod \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\" (UID: \"ee0241b5-68ec-419c-8f4a-64d92e49a60b\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.316598 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nhvn\" (UniqueName: \"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") pod \"41b108a7-949e-41e0-984d-21845d76d4eb\" (UID: \"41b108a7-949e-41e0-984d-21845d76d4eb\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.318243 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs" (OuterVolumeSpecName: "logs") pod "ee0241b5-68ec-419c-8f4a-64d92e49a60b" (UID: "ee0241b5-68ec-419c-8f4a-64d92e49a60b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.319249 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs" (OuterVolumeSpecName: "logs") pod "41b108a7-949e-41e0-984d-21845d76d4eb" (UID: "41b108a7-949e-41e0-984d-21845d76d4eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.321052 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9" (OuterVolumeSpecName: "kube-api-access-2szx9") pod "ee0241b5-68ec-419c-8f4a-64d92e49a60b" (UID: "ee0241b5-68ec-419c-8f4a-64d92e49a60b"). InnerVolumeSpecName "kube-api-access-2szx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.328469 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn" (OuterVolumeSpecName: "kube-api-access-5nhvn") pod "41b108a7-949e-41e0-984d-21845d76d4eb" (UID: "41b108a7-949e-41e0-984d-21845d76d4eb"). InnerVolumeSpecName "kube-api-access-5nhvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.353952 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee0241b5-68ec-419c-8f4a-64d92e49a60b" (UID: "ee0241b5-68ec-419c-8f4a-64d92e49a60b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.359178 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data" (OuterVolumeSpecName: "config-data") pod "41b108a7-949e-41e0-984d-21845d76d4eb" (UID: "41b108a7-949e-41e0-984d-21845d76d4eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.365875 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data" (OuterVolumeSpecName: "config-data") pod "ee0241b5-68ec-419c-8f4a-64d92e49a60b" (UID: "ee0241b5-68ec-419c-8f4a-64d92e49a60b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419653 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419692 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41b108a7-949e-41e0-984d-21845d76d4eb-logs\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419707 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419718 5081 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee0241b5-68ec-419c-8f4a-64d92e49a60b-logs\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419729 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2szx9\" (UniqueName: \"kubernetes.io/projected/ee0241b5-68ec-419c-8f4a-64d92e49a60b-kube-api-access-2szx9\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419742 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0241b5-68ec-419c-8f4a-64d92e49a60b-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.419752 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nhvn\" (UniqueName: \"kubernetes.io/projected/41b108a7-949e-41e0-984d-21845d76d4eb-kube-api-access-5nhvn\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.449131 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41b108a7-949e-41e0-984d-21845d76d4eb" (UID: "41b108a7-949e-41e0-984d-21845d76d4eb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.522411 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41b108a7-949e-41e0-984d-21845d76d4eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.644532 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.738353 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a25ab879-1c41-4b6e-920c-51903f580487","Type":"ContainerStarted","Data":"4913df83f32b4d88611173b5552c98dc46eff699800ccaaa260b8bb5e3bc1d40"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.739722 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"311e8141-0ab3-4921-b678-528ba5e545f0","Type":"ContainerStarted","Data":"61ab68c2dd610c0bc168e31f16ebfbbd49bf6f87672c5bf113da49aaa187fd2f"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.739913 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.743809 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41b108a7-949e-41e0-984d-21845d76d4eb","Type":"ContainerDied","Data":"b7a6aac07d49018b7792034215adaf124e6f0393687ec3c83a709dd3239ecd06"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.743878 5081 scope.go:117] "RemoveContainer" containerID="3983b21ea76676c2f8ce9fb09b8ed8a3748f827017101ce24cc74bd0154b826f" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.744012 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.752288 5081 generic.go:334] "Generic (PLEG): container finished" podID="722dcb0f-93f4-4a37-a8c2-75c6a301a047" containerID="296165e932f2ad4eb8dc06248b5f1a996861581d68b0818c36bedea8305a1c3d" exitCode=0 Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.752453 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" event={"ID":"722dcb0f-93f4-4a37-a8c2-75c6a301a047","Type":"ContainerDied","Data":"296165e932f2ad4eb8dc06248b5f1a996861581d68b0818c36bedea8305a1c3d"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.755692 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.7556806099999998 podStartE2EDuration="2.75568061s" podCreationTimestamp="2025-10-03 18:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:00:02.754426334 +0000 UTC m=+9121.719982957" watchObservedRunningTime="2025-10-03 18:00:02.75568061 +0000 UTC m=+9121.721237213" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.760153 5081 generic.go:334] "Generic (PLEG): container finished" podID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerID="e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734" exitCode=0 Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.760223 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerDied","Data":"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.760245 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ee0241b5-68ec-419c-8f4a-64d92e49a60b","Type":"ContainerDied","Data":"140d15a752a8c7d4fa9a9e908656d3a44fa66cbd45f23bfc126e7851dc028b9e"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.760263 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.761870 5081 generic.go:334] "Generic (PLEG): container finished" podID="da03a230-eac7-4927-a20f-c680a7647aa3" containerID="b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7" exitCode=0 Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.761898 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da03a230-eac7-4927-a20f-c680a7647aa3","Type":"ContainerDied","Data":"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.761911 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da03a230-eac7-4927-a20f-c680a7647aa3","Type":"ContainerDied","Data":"096c7f07c645429dcb5ba3253c2486b853f4a79e6195f1fbdd61deecd08fc78e"} Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.761956 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.782532 5081 scope.go:117] "RemoveContainer" containerID="8cce5a28488bbf244a3f741d2edcd6af96637dc4382513e573271312e771dce2" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.805246 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.822161 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.829073 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle\") pod \"da03a230-eac7-4927-a20f-c680a7647aa3\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.829411 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krp8f\" (UniqueName: \"kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f\") pod \"da03a230-eac7-4927-a20f-c680a7647aa3\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.829470 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data\") pod \"da03a230-eac7-4927-a20f-c680a7647aa3\" (UID: \"da03a230-eac7-4927-a20f-c680a7647aa3\") " Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.837990 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.851014 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f" (OuterVolumeSpecName: "kube-api-access-krp8f") pod "da03a230-eac7-4927-a20f-c680a7647aa3" (UID: "da03a230-eac7-4927-a20f-c680a7647aa3"). InnerVolumeSpecName "kube-api-access-krp8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.851090 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.852196 5081 scope.go:117] "RemoveContainer" containerID="e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.866753 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.867326 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-metadata" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867344 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-metadata" Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.867378 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-log" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867384 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-log" Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.867402 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867408 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.867424 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-api" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867429 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-api" Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.867443 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da03a230-eac7-4927-a20f-c680a7647aa3" containerName="nova-scheduler-scheduler" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867449 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="da03a230-eac7-4927-a20f-c680a7647aa3" containerName="nova-scheduler-scheduler" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867662 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="da03a230-eac7-4927-a20f-c680a7647aa3" containerName="nova-scheduler-scheduler" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867675 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-log" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867685 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" containerName="nova-api-api" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867695 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" containerName="nova-metadata-log" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.867708 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" 
containerName="nova-metadata-metadata" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.870104 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.872193 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.876335 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.885143 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.887057 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.888708 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da03a230-eac7-4927-a20f-c680a7647aa3" (UID: "da03a230-eac7-4927-a20f-c680a7647aa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.889952 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.898608 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.899813 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data" (OuterVolumeSpecName: "config-data") pod "da03a230-eac7-4927-a20f-c680a7647aa3" (UID: "da03a230-eac7-4927-a20f-c680a7647aa3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.931842 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.931882 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krp8f\" (UniqueName: \"kubernetes.io/projected/da03a230-eac7-4927-a20f-c680a7647aa3-kube-api-access-krp8f\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.931901 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da03a230-eac7-4927-a20f-c680a7647aa3-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 18:00:02 crc kubenswrapper[5081]: I1003 18:00:02.981133 5081 scope.go:117] "RemoveContainer" containerID="c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a" Oct 03 18:00:02 crc kubenswrapper[5081]: E1003 18:00:02.988852 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41b108a7_949e_41e0_984d_21845d76d4eb.slice/crio-b7a6aac07d49018b7792034215adaf124e6f0393687ec3c83a709dd3239ecd06\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee0241b5_68ec_419c_8f4a_64d92e49a60b.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee0241b5_68ec_419c_8f4a_64d92e49a60b.slice/crio-140d15a752a8c7d4fa9a9e908656d3a44fa66cbd45f23bfc126e7851dc028b9e\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41b108a7_949e_41e0_984d_21845d76d4eb.slice\": RecentStats: unable to find data in memory cache]" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.008065 5081 scope.go:117] "RemoveContainer" containerID="e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734" Oct 03 18:00:03 crc kubenswrapper[5081]: E1003 18:00:03.008596 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734\": container with ID starting with e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734 not found: ID does not exist" containerID="e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.008637 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734"} err="failed to get container status \"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734\": rpc error: code = NotFound desc = could not find container \"e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734\": container with ID starting with e99d8166d61d3f69dad3ad1503904c17397a9d122e965f786cb3747818e7b734 not found: ID does not exist" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.008916 5081 scope.go:117] "RemoveContainer" containerID="c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a" Oct 03 18:00:03 crc kubenswrapper[5081]: E1003 18:00:03.009212 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a\": container with ID starting with c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a not found: ID does not exist" containerID="c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.009237 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a"} err="failed to get container status \"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a\": rpc error: code = NotFound desc = could not find container \"c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a\": container with ID starting with c9c7b05eb101830f1524fad410c6db5797c4ea54a30525a15801d083defd9e6a not found: ID does not exist" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.009253 5081 scope.go:117] "RemoveContainer" containerID="b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.035097 5081 scope.go:117] "RemoveContainer" containerID="b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7" Oct 03 18:00:03 crc kubenswrapper[5081]: E1003 18:00:03.035510 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7\": container with ID starting with b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7 not found: ID does not exist" containerID="b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.035553 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7"} err="failed to get container status \"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7\": rpc error: code = NotFound desc = could not find container \"b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7\": container with ID starting with b6c2d6ce4cb3f520e342f11b7a7d6bd9c4cdd5169ae7688447c070171b0667f7 not found: ID does not exist" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.035813 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ncd2\" (UniqueName: \"kubernetes.io/projected/c095f0e1-925d-4ed3-afc0-7392d36ce821-kube-api-access-2ncd2\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.035872 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/995b2446-f90a-4493-8a6f-668d1b2bd321-logs\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.035921 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp6xw\" (UniqueName: \"kubernetes.io/projected/995b2446-f90a-4493-8a6f-668d1b2bd321-kube-api-access-lp6xw\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0" Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.036043 
5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.036156 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c095f0e1-925d-4ed3-afc0-7392d36ce821-logs\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.036216 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.036252 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-config-data\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.036277 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-config-data\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.111996 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138149 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138211 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-config-data\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138242 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-config-data\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138353 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ncd2\" (UniqueName: \"kubernetes.io/projected/c095f0e1-925d-4ed3-afc0-7392d36ce821-kube-api-access-2ncd2\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138388 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/995b2446-f90a-4493-8a6f-668d1b2bd321-logs\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138428 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp6xw\" (UniqueName: \"kubernetes.io/projected/995b2446-f90a-4493-8a6f-668d1b2bd321-kube-api-access-lp6xw\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138495 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.138599 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c095f0e1-925d-4ed3-afc0-7392d36ce821-logs\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.139070 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c095f0e1-925d-4ed3-afc0-7392d36ce821-logs\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.139410 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/995b2446-f90a-4493-8a6f-668d1b2bd321-logs\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.141173 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.143981 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-config-data\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.144069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c095f0e1-925d-4ed3-afc0-7392d36ce821-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.145069 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.146249 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/995b2446-f90a-4493-8a6f-668d1b2bd321-config-data\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.155693 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.160328 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ncd2\" (UniqueName: \"kubernetes.io/projected/c095f0e1-925d-4ed3-afc0-7392d36ce821-kube-api-access-2ncd2\") pod \"nova-metadata-0\" (UID: \"c095f0e1-925d-4ed3-afc0-7392d36ce821\") " pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.163873 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.167489 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp6xw\" (UniqueName: \"kubernetes.io/projected/995b2446-f90a-4493-8a6f-668d1b2bd321-kube-api-access-lp6xw\") pod \"nova-api-0\" (UID: \"995b2446-f90a-4493-8a6f-668d1b2bd321\") " pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.170101 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.175695 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.279237 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.290132 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.343455 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-config-data\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.343888 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.343953 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkvzh\" (UniqueName: \"kubernetes.io/projected/5b68a946-83c2-4eb6-8472-11a5fc334f38-kube-api-access-zkvzh\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.446063 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.446119 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkvzh\" (UniqueName: \"kubernetes.io/projected/5b68a946-83c2-4eb6-8472-11a5fc334f38-kube-api-access-zkvzh\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.446186 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-config-data\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.454971 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-config-data\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.454977 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b68a946-83c2-4eb6-8472-11a5fc334f38-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.466228 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkvzh\" (UniqueName: \"kubernetes.io/projected/5b68a946-83c2-4eb6-8472-11a5fc334f38-kube-api-access-zkvzh\") pod \"nova-scheduler-0\" (UID: \"5b68a946-83c2-4eb6-8472-11a5fc334f38\") " pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.517672 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.779958 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a25ab879-1c41-4b6e-920c-51903f580487","Type":"ContainerStarted","Data":"d21cd7df25dfe57ef3430549c1af7d0595d016dca161bc9108fce6e80b501dfb"}
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.780240 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.810486 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.810470228 podStartE2EDuration="3.810470228s" podCreationTimestamp="2025-10-03 18:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:00:03.798249498 +0000 UTC m=+9122.763806121" watchObservedRunningTime="2025-10-03 18:00:03.810470228 +0000 UTC m=+9122.776026841"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.843069 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41b108a7-949e-41e0-984d-21845d76d4eb" path="/var/lib/kubelet/pods/41b108a7-949e-41e0-984d-21845d76d4eb/volumes"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.843857 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da03a230-eac7-4927-a20f-c680a7647aa3" path="/var/lib/kubelet/pods/da03a230-eac7-4927-a20f-c680a7647aa3/volumes"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.844412 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee0241b5-68ec-419c-8f4a-64d92e49a60b" path="/var/lib/kubelet/pods/ee0241b5-68ec-419c-8f4a-64d92e49a60b/volumes"
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.869237 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: W1003 18:00:03.871833 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod995b2446_f90a_4493_8a6f_668d1b2bd321.slice/crio-2d6192f7f07522c3f5ed87c9170a431dd3e9e82861cf33f4513a27975ac6947e WatchSource:0}: Error finding container 2d6192f7f07522c3f5ed87c9170a431dd3e9e82861cf33f4513a27975ac6947e: Status 404 returned error can't find the container with id 2d6192f7f07522c3f5ed87c9170a431dd3e9e82861cf33f4513a27975ac6947e
Oct 03 18:00:03 crc kubenswrapper[5081]: I1003 18:00:03.953237 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 03 18:00:03 crc kubenswrapper[5081]: W1003 18:00:03.970461 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc095f0e1_925d_4ed3_afc0_7392d36ce821.slice/crio-ccc648765f5ba80c80d634f2f079023712368a8e92f9db1aae8fdd0c1550a973 WatchSource:0}: Error finding container ccc648765f5ba80c80d634f2f079023712368a8e92f9db1aae8fdd0c1550a973: Status 404 returned error can't find the container with id ccc648765f5ba80c80d634f2f079023712368a8e92f9db1aae8fdd0c1550a973
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.134636 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 18:00:04 crc kubenswrapper[5081]: W1003 18:00:04.137410 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b68a946_83c2_4eb6_8472_11a5fc334f38.slice/crio-68d944c31be1b6094d2450c0164afe8b30ed8724563d5b2c45e78aea36f0c91c WatchSource:0}: Error finding container 68d944c31be1b6094d2450c0164afe8b30ed8724563d5b2c45e78aea36f0c91c: Status 404 returned error can't find the container with id 68d944c31be1b6094d2450c0164afe8b30ed8724563d5b2c45e78aea36f0c91c
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.141192 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb"
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.268722 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdxpv\" (UniqueName: \"kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv\") pod \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") "
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.268920 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume\") pod \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") "
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.268973 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume\") pod \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\" (UID: \"722dcb0f-93f4-4a37-a8c2-75c6a301a047\") "
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.272842 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume" (OuterVolumeSpecName: "config-volume") pod "722dcb0f-93f4-4a37-a8c2-75c6a301a047" (UID: "722dcb0f-93f4-4a37-a8c2-75c6a301a047"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.275115 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "722dcb0f-93f4-4a37-a8c2-75c6a301a047" (UID: "722dcb0f-93f4-4a37-a8c2-75c6a301a047"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.276780 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv" (OuterVolumeSpecName: "kube-api-access-rdxpv") pod "722dcb0f-93f4-4a37-a8c2-75c6a301a047" (UID: "722dcb0f-93f4-4a37-a8c2-75c6a301a047"). InnerVolumeSpecName "kube-api-access-rdxpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.371534 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdxpv\" (UniqueName: \"kubernetes.io/projected/722dcb0f-93f4-4a37-a8c2-75c6a301a047-kube-api-access-rdxpv\") on node \"crc\" DevicePath \"\""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.371607 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/722dcb0f-93f4-4a37-a8c2-75c6a301a047-config-volume\") on node \"crc\" DevicePath \"\""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.371620 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/722dcb0f-93f4-4a37-a8c2-75c6a301a047-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.800507 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c095f0e1-925d-4ed3-afc0-7392d36ce821","Type":"ContainerStarted","Data":"04e5f18c5053abc68c69f0ef605f49e758ea9323021e888176311ccaeda19da7"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.800604 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c095f0e1-925d-4ed3-afc0-7392d36ce821","Type":"ContainerStarted","Data":"b52df47d27b4e72db012534733e6ff12ff496acdb4c5763d8c33e3c745269e3c"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.800621 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c095f0e1-925d-4ed3-afc0-7392d36ce821","Type":"ContainerStarted","Data":"ccc648765f5ba80c80d634f2f079023712368a8e92f9db1aae8fdd0c1550a973"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.803721 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"995b2446-f90a-4493-8a6f-668d1b2bd321","Type":"ContainerStarted","Data":"452936903e2b96daf7cab59ed88208933eba35913a0c345007efdedadbb5d16c"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.803767 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"995b2446-f90a-4493-8a6f-668d1b2bd321","Type":"ContainerStarted","Data":"595f9a8d3d8b930b02e7b7cc5b8fad2aa599b93ef64f8555c3634871aa215905"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.803779 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"995b2446-f90a-4493-8a6f-668d1b2bd321","Type":"ContainerStarted","Data":"2d6192f7f07522c3f5ed87c9170a431dd3e9e82861cf33f4513a27975ac6947e"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.805734 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb" event={"ID":"722dcb0f-93f4-4a37-a8c2-75c6a301a047","Type":"ContainerDied","Data":"db9d8c5737e51029cfbdefe0519d6ef5b469edeb96cc560163b077c6b364a479"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.805756 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325240-xmvfb"
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.805764 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db9d8c5737e51029cfbdefe0519d6ef5b469edeb96cc560163b077c6b364a479"
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.807617 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5b68a946-83c2-4eb6-8472-11a5fc334f38","Type":"ContainerStarted","Data":"41142e6b36f07c64af8c22d0476c62cb132a838ab360dad4ce6c85240627a9cd"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.807660 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5b68a946-83c2-4eb6-8472-11a5fc334f38","Type":"ContainerStarted","Data":"68d944c31be1b6094d2450c0164afe8b30ed8724563d5b2c45e78aea36f0c91c"}
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.824493 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.824473981 podStartE2EDuration="2.824473981s" podCreationTimestamp="2025-10-03 18:00:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:00:04.820477146 +0000 UTC m=+9123.786033769" watchObservedRunningTime="2025-10-03 18:00:04.824473981 +0000 UTC m=+9123.790030614"
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.842273 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.842254299 podStartE2EDuration="2.842254299s" podCreationTimestamp="2025-10-03 18:00:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:00:04.841181348 +0000 UTC m=+9123.806737981" watchObservedRunningTime="2025-10-03 18:00:04.842254299 +0000 UTC m=+9123.807810922"
Oct 03 18:00:04 crc kubenswrapper[5081]: I1003 18:00:04.867878 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.867861711 podStartE2EDuration="1.867861711s" podCreationTimestamp="2025-10-03 18:00:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:00:04.860311645 +0000 UTC m=+9123.825868258" watchObservedRunningTime="2025-10-03 18:00:04.867861711 +0000 UTC m=+9123.833418324"
Oct 03 18:00:05 crc kubenswrapper[5081]: I1003 18:00:05.217345 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r"]
Oct 03 18:00:05 crc kubenswrapper[5081]: I1003 18:00:05.233589 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325195-6lx9r"]
Oct 03 18:00:05 crc kubenswrapper[5081]: I1003 18:00:05.840186 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d25a54a-5d99-4948-b99e-374c1fa40681" path="/var/lib/kubelet/pods/8d25a54a-5d99-4948-b99e-374c1fa40681/volumes"
Oct 03 18:00:08 crc kubenswrapper[5081]: I1003 18:00:08.279949 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 18:00:08 crc kubenswrapper[5081]: I1003 18:00:08.280065 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 18:00:08 crc kubenswrapper[5081]: I1003 18:00:08.519444 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 03 18:00:11 crc kubenswrapper[5081]: I1003 18:00:11.093501 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 03 18:00:11 crc kubenswrapper[5081]: I1003 18:00:11.145704 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.280328 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.280892 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.290570 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.290637 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.519305 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.560067 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 03 18:00:13 crc kubenswrapper[5081]: I1003 18:00:13.953181 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 03 18:00:14 crc kubenswrapper[5081]: I1003 18:00:14.361742 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c095f0e1-925d-4ed3-afc0-7392d36ce821" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 18:00:14 crc kubenswrapper[5081]: I1003 18:00:14.443871 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="995b2446-f90a-4493-8a6f-668d1b2bd321" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 18:00:14 crc kubenswrapper[5081]: I1003 18:00:14.443885 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c095f0e1-925d-4ed3-afc0-7392d36ce821" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 18:00:14 crc kubenswrapper[5081]: I1003 18:00:14.443940 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="995b2446-f90a-4493-8a6f-668d1b2bd321" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.282245 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.282850 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.284540 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.285102 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.294710 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.296153 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.296444 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 03 18:00:23 crc kubenswrapper[5081]: I1003 18:00:23.299093 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 03 18:00:24 crc kubenswrapper[5081]: I1003 18:00:24.024960 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 03 18:00:24 crc kubenswrapper[5081]: I1003 18:00:24.030828 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.165929 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"]
Oct 03 18:00:25 crc kubenswrapper[5081]: E1003 18:00:25.166363 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="722dcb0f-93f4-4a37-a8c2-75c6a301a047" containerName="collect-profiles"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.166374 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="722dcb0f-93f4-4a37-a8c2-75c6a301a047" containerName="collect-profiles"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.166578 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="722dcb0f-93f4-4a37-a8c2-75c6a301a047" containerName="collect-profiles"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.167345 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.173715 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.173972 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.174238 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.174611 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.174807 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.175102 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.175271 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-j8j2p"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.181512 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"]
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235070 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235414 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235460 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235508 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235643 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235749 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5m4r\" (UniqueName: \"kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235778 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235817 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235919 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.235994 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337346 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337416 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337437 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337466 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337502 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5m4r\" (UniqueName: \"kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337525 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337581 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337633 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337686 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337788 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.337830 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.339103 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.339265 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.346321 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.346735 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.346859 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.347581 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.353148 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.353816 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.354309 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.364157 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.365417 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5m4r\" (UniqueName: \"kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:25 crc kubenswrapper[5081]: I1003 18:00:25.488214 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"
Oct 03 18:00:26 crc kubenswrapper[5081]: I1003 18:00:26.116407 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc"]
Oct 03 18:00:27 crc kubenswrapper[5081]: I1003 18:00:27.134752 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" event={"ID":"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c","Type":"ContainerStarted","Data":"31580c26fa6d9640ac012838673426bd61c9de76acc106d3bd4b3c3f70d060e2"}
Oct 03 18:00:28 crc kubenswrapper[5081]: I1003 18:00:28.163642 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" event={"ID":"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c","Type":"ContainerStarted","Data":"d59021c10c703b9e672ab808683bcb1aa9a1fd1e474185f1a46d80093767bf80"}
Oct 03 18:00:28 crc kubenswrapper[5081]: I1003 18:00:28.184628 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" podStartSLOduration=2.282596738 podStartE2EDuration="3.184604689s" podCreationTimestamp="2025-10-03 18:00:25 +0000 UTC" firstStartedPulling="2025-10-03 18:00:26.128443518 +0000 UTC m=+9145.094000131" lastFinishedPulling="2025-10-03 18:00:27.030451469 +0000 UTC m=+9145.996008082" observedRunningTime="2025-10-03 18:00:28.180409009 +0000 UTC m=+9147.145965622" watchObservedRunningTime="2025-10-03 18:00:28.184604689 +0000 UTC m=+9147.150161312"
Oct 03 18:00:29 crc kubenswrapper[5081]: I1003 18:00:29.029734 5081 scope.go:117] "RemoveContainer" containerID="23923f40825074c4a6207c51bf39d02be0d8443aec4d7996035e69d513fb152c"
Oct 03 18:00:30 crc kubenswrapper[5081]: I1003 18:00:30.647263 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 18:00:30 crc kubenswrapper[5081]: I1003 18:00:30.647572 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.151812 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29325241-j44l4"]
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.154372 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.162360 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325241-j44l4"]
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.234701 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.234928 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84smq\" (UniqueName: \"kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.234970 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.235054 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.336590 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84smq\" (UniqueName: \"kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.336651 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.336730 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.336771 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.347659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.347790 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.352364 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.352613 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84smq\" (UniqueName: \"kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq\") pod \"keystone-cron-29325241-j44l4\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") " pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.482533 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.647080 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.647463 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 18:01:00 crc kubenswrapper[5081]: I1003 18:01:00.939682 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325241-j44l4"]
Oct 03 18:01:01 crc kubenswrapper[5081]: I1003 18:01:01.490585 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325241-j44l4" event={"ID":"b1f28221-9081-4daf-aea6-42b308f80bae","Type":"ContainerStarted","Data":"38dd5ac34d7089105c69410634c88f0518ef2be58044b3bcbb0c66b30f3354bb"}
Oct 03 18:01:01 crc kubenswrapper[5081]: I1003 18:01:01.491801 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325241-j44l4" event={"ID":"b1f28221-9081-4daf-aea6-42b308f80bae","Type":"ContainerStarted","Data":"b9469042542591474a91184569cd68199f5d8c86eaa86f5fbd30bed21624b7dd"}
Oct 03 18:01:01 crc kubenswrapper[5081]: I1003 18:01:01.510188 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29325241-j44l4" podStartSLOduration=1.5101713810000001 podStartE2EDuration="1.510171381s" podCreationTimestamp="2025-10-03 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:01:01.507880576 +0000 UTC m=+9180.473437209" watchObservedRunningTime="2025-10-03 18:01:01.510171381 +0000 UTC m=+9180.475727994"
Oct 03 18:01:06 crc kubenswrapper[5081]: I1003 18:01:06.539288 5081 generic.go:334] "Generic (PLEG): container finished" podID="b1f28221-9081-4daf-aea6-42b308f80bae" containerID="38dd5ac34d7089105c69410634c88f0518ef2be58044b3bcbb0c66b30f3354bb" exitCode=0
Oct 03 18:01:06 crc kubenswrapper[5081]: I1003 18:01:06.539371 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325241-j44l4" event={"ID":"b1f28221-9081-4daf-aea6-42b308f80bae","Type":"ContainerDied","Data":"38dd5ac34d7089105c69410634c88f0518ef2be58044b3bcbb0c66b30f3354bb"}
Oct 03 18:01:07 crc kubenswrapper[5081]: I1003 18:01:07.941040 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:07 crc kubenswrapper[5081]: I1003 18:01:07.999794 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle\") pod \"b1f28221-9081-4daf-aea6-42b308f80bae\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") "
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:07.999972 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data\") pod \"b1f28221-9081-4daf-aea6-42b308f80bae\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") "
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.000121 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84smq\" (UniqueName: \"kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq\") pod \"b1f28221-9081-4daf-aea6-42b308f80bae\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") "
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.000249 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys\") pod \"b1f28221-9081-4daf-aea6-42b308f80bae\" (UID: \"b1f28221-9081-4daf-aea6-42b308f80bae\") "
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.006343 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b1f28221-9081-4daf-aea6-42b308f80bae" (UID: "b1f28221-9081-4daf-aea6-42b308f80bae"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.012692 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq" (OuterVolumeSpecName: "kube-api-access-84smq") pod "b1f28221-9081-4daf-aea6-42b308f80bae" (UID: "b1f28221-9081-4daf-aea6-42b308f80bae"). InnerVolumeSpecName "kube-api-access-84smq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.032493 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1f28221-9081-4daf-aea6-42b308f80bae" (UID: "b1f28221-9081-4daf-aea6-42b308f80bae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.054512 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data" (OuterVolumeSpecName: "config-data") pod "b1f28221-9081-4daf-aea6-42b308f80bae" (UID: "b1f28221-9081-4daf-aea6-42b308f80bae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.103029 5081 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.103059 5081 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.103068 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84smq\" (UniqueName: \"kubernetes.io/projected/b1f28221-9081-4daf-aea6-42b308f80bae-kube-api-access-84smq\") on node \"crc\" DevicePath \"\""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.103077 5081 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1f28221-9081-4daf-aea6-42b308f80bae-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.560971 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325241-j44l4" event={"ID":"b1f28221-9081-4daf-aea6-42b308f80bae","Type":"ContainerDied","Data":"b9469042542591474a91184569cd68199f5d8c86eaa86f5fbd30bed21624b7dd"}
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.561013 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9469042542591474a91184569cd68199f5d8c86eaa86f5fbd30bed21624b7dd"
Oct 03 18:01:08 crc kubenswrapper[5081]: I1003 18:01:08.561041 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325241-j44l4"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.793414 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"]
Oct 03 18:01:10 crc kubenswrapper[5081]: E1003 18:01:10.795277 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1f28221-9081-4daf-aea6-42b308f80bae" containerName="keystone-cron"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.795294 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1f28221-9081-4daf-aea6-42b308f80bae" containerName="keystone-cron"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.796246 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1f28221-9081-4daf-aea6-42b308f80bae" containerName="keystone-cron"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.800739 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.816578 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"]
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.868662 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.868722 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.869006 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhtbv\" (UniqueName: \"kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.971073 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhtbv\" (UniqueName: \"kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.971166 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.971187 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.971687 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:10 crc kubenswrapper[5081]: I1003 18:01:10.972023 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:11 crc kubenswrapper[5081]: I1003 18:01:11.009988 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhtbv\" (UniqueName: \"kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv\") pod \"certified-operators-wmvkw\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:11 crc kubenswrapper[5081]: I1003 18:01:11.139813 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmvkw"
Oct 03 18:01:11 crc kubenswrapper[5081]: I1003 18:01:11.653997 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"]
Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.597064 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5snqt"]
Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.600603 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.608474 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5snqt"] Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.619170 5081 generic.go:334] "Generic (PLEG): container finished" podID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerID="5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7" exitCode=0 Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.619221 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerDied","Data":"5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7"} Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.619256 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerStarted","Data":"00a362707799c85ee5be9df944cc89d637460db4d1165b653479d9c07abe1ff4"} Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.624418 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.713904 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.714261 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24lw5\" (UniqueName: \"kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.714544 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.816116 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24lw5\" (UniqueName: \"kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.816249 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.816343 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.816961 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.817032 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.836969 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24lw5\" (UniqueName: \"kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5\") pod \"community-operators-5snqt\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:12 crc kubenswrapper[5081]: I1003 18:01:12.931930 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:13 crc kubenswrapper[5081]: I1003 18:01:13.469427 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5snqt"] Oct 03 18:01:13 crc kubenswrapper[5081]: I1003 18:01:13.629844 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerStarted","Data":"47b191e34f45a6db9a12884b03d6cda52e9cb8859ea791711aed1336f7d8b049"} Oct 03 18:01:14 crc kubenswrapper[5081]: I1003 18:01:14.646866 5081 generic.go:334] "Generic (PLEG): container finished" podID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerID="5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7" exitCode=0 Oct 03 18:01:14 crc kubenswrapper[5081]: I1003 18:01:14.646940 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerDied","Data":"5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7"} Oct 03 18:01:14 crc kubenswrapper[5081]: I1003 18:01:14.651731 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerStarted","Data":"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9"} Oct 03 18:01:15 crc kubenswrapper[5081]: I1003 18:01:15.666116 5081 generic.go:334] "Generic (PLEG): container finished" podID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerID="6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9" exitCode=0 Oct 03 18:01:15 crc kubenswrapper[5081]: I1003 18:01:15.666313 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerDied","Data":"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9"} Oct 
03 18:01:16 crc kubenswrapper[5081]: I1003 18:01:16.677278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerStarted","Data":"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b"} Oct 03 18:01:17 crc kubenswrapper[5081]: I1003 18:01:17.694870 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerStarted","Data":"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14"} Oct 03 18:01:17 crc kubenswrapper[5081]: I1003 18:01:17.699645 5081 generic.go:334] "Generic (PLEG): container finished" podID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerID="f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b" exitCode=0 Oct 03 18:01:17 crc kubenswrapper[5081]: I1003 18:01:17.699686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerDied","Data":"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b"} Oct 03 18:01:17 crc kubenswrapper[5081]: I1003 18:01:17.736910 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wmvkw" podStartSLOduration=4.117894266 podStartE2EDuration="7.73688939s" podCreationTimestamp="2025-10-03 18:01:10 +0000 UTC" firstStartedPulling="2025-10-03 18:01:12.624035332 +0000 UTC m=+9191.589591955" lastFinishedPulling="2025-10-03 18:01:16.243030436 +0000 UTC m=+9195.208587079" observedRunningTime="2025-10-03 18:01:17.728068867 +0000 UTC m=+9196.693625480" watchObservedRunningTime="2025-10-03 18:01:17.73688939 +0000 UTC m=+9196.702446003" Oct 03 18:01:18 crc kubenswrapper[5081]: I1003 18:01:18.714077 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerStarted","Data":"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3"} Oct 03 18:01:18 crc kubenswrapper[5081]: I1003 18:01:18.742687 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5snqt" podStartSLOduration=3.2713538939999998 podStartE2EDuration="6.742671997s" podCreationTimestamp="2025-10-03 18:01:12 +0000 UTC" firstStartedPulling="2025-10-03 18:01:14.64935359 +0000 UTC m=+9193.614910203" lastFinishedPulling="2025-10-03 18:01:18.120671693 +0000 UTC m=+9197.086228306" observedRunningTime="2025-10-03 18:01:18.732867466 +0000 UTC m=+9197.698424079" watchObservedRunningTime="2025-10-03 18:01:18.742671997 +0000 UTC m=+9197.708228610" Oct 03 18:01:21 crc kubenswrapper[5081]: I1003 18:01:21.140045 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:21 crc kubenswrapper[5081]: I1003 18:01:21.142127 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:22 crc kubenswrapper[5081]: I1003 18:01:22.209408 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-wmvkw" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="registry-server" probeResult="failure" output=< Oct 03 18:01:22 crc kubenswrapper[5081]: timeout: failed to connect service 
":50051" within 1s Oct 03 18:01:22 crc kubenswrapper[5081]: > Oct 03 18:01:22 crc kubenswrapper[5081]: I1003 18:01:22.932842 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:22 crc kubenswrapper[5081]: I1003 18:01:22.932896 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:22 crc kubenswrapper[5081]: I1003 18:01:22.989974 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:23 crc kubenswrapper[5081]: I1003 18:01:23.821817 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:24 crc kubenswrapper[5081]: I1003 18:01:24.381884 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5snqt"] Oct 03 18:01:25 crc kubenswrapper[5081]: I1003 18:01:25.782302 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5snqt" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="registry-server" containerID="cri-o://67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3" gracePeriod=2 Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.739214 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.802005 5081 generic.go:334] "Generic (PLEG): container finished" podID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerID="67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3" exitCode=0 Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.802055 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerDied","Data":"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3"} Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.802089 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5snqt" event={"ID":"de7d607f-d7f2-43da-9860-801a1f0166bb","Type":"ContainerDied","Data":"47b191e34f45a6db9a12884b03d6cda52e9cb8859ea791711aed1336f7d8b049"} Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.802112 5081 scope.go:117] "RemoveContainer" containerID="67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.802281 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5snqt" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.826350 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24lw5\" (UniqueName: \"kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5\") pod \"de7d607f-d7f2-43da-9860-801a1f0166bb\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.826406 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities\") pod \"de7d607f-d7f2-43da-9860-801a1f0166bb\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.826534 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content\") pod \"de7d607f-d7f2-43da-9860-801a1f0166bb\" (UID: \"de7d607f-d7f2-43da-9860-801a1f0166bb\") " Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.828002 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities" (OuterVolumeSpecName: "utilities") pod "de7d607f-d7f2-43da-9860-801a1f0166bb" (UID: "de7d607f-d7f2-43da-9860-801a1f0166bb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.831106 5081 scope.go:117] "RemoveContainer" containerID="f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.836906 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5" (OuterVolumeSpecName: "kube-api-access-24lw5") pod "de7d607f-d7f2-43da-9860-801a1f0166bb" (UID: "de7d607f-d7f2-43da-9860-801a1f0166bb"). InnerVolumeSpecName "kube-api-access-24lw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.886354 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de7d607f-d7f2-43da-9860-801a1f0166bb" (UID: "de7d607f-d7f2-43da-9860-801a1f0166bb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.909512 5081 scope.go:117] "RemoveContainer" containerID="5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.931359 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24lw5\" (UniqueName: \"kubernetes.io/projected/de7d607f-d7f2-43da-9860-801a1f0166bb-kube-api-access-24lw5\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.931391 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.931403 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7d607f-d7f2-43da-9860-801a1f0166bb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.948484 5081 scope.go:117] "RemoveContainer" containerID="67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3" Oct 03 18:01:26 crc kubenswrapper[5081]: E1003 18:01:26.949277 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3\": container with ID starting with 67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3 not found: ID does not exist" containerID="67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.949342 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3"} err="failed to get container status \"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3\": rpc error: code = NotFound desc = could not find container \"67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3\": container with ID starting with 67237112fd96b1e1c9f7124e75cbdd68ac89157b61f6ffe2141f8c57279fa4a3 not found: ID does not exist" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.949377 5081 scope.go:117] "RemoveContainer" containerID="f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b" Oct 03 18:01:26 crc kubenswrapper[5081]: E1003 18:01:26.949948 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b\": container with ID starting with f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b not found: ID does not exist" containerID="f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.949992 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b"} err="failed to get container status \"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b\": rpc error: code = NotFound desc = could not find container \"f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b\": container with ID starting with f3a9464665723b1a16c1665653eecc571b15f8b54ed89e5f9555cb9b99a6c57b not found: ID does not exist" Oct 03 18:01:26 crc 
kubenswrapper[5081]: I1003 18:01:26.950020 5081 scope.go:117] "RemoveContainer" containerID="5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7" Oct 03 18:01:26 crc kubenswrapper[5081]: E1003 18:01:26.950339 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7\": container with ID starting with 5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7 not found: ID does not exist" containerID="5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7" Oct 03 18:01:26 crc kubenswrapper[5081]: I1003 18:01:26.950366 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7"} err="failed to get container status \"5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7\": rpc error: code = NotFound desc = could not find container \"5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7\": container with ID starting with 5ebe73d417718e4b4bc9fe1a83bc1b79e859a5e1b0845a92806c679bcef557f7 not found: ID does not exist" Oct 03 18:01:27 crc kubenswrapper[5081]: I1003 18:01:27.146865 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5snqt"] Oct 03 18:01:27 crc kubenswrapper[5081]: I1003 18:01:27.156701 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5snqt"] Oct 03 18:01:27 crc kubenswrapper[5081]: I1003 18:01:27.844748 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" path="/var/lib/kubelet/pods/de7d607f-d7f2-43da-9860-801a1f0166bb/volumes" Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.647378 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.647977 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.648022 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.648655 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.648708 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" 
containerID="cri-o://e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" gracePeriod=600 Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.856591 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" exitCode=0 Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.856666 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401"} Oct 03 18:01:30 crc kubenswrapper[5081]: I1003 18:01:30.856948 5081 scope.go:117] "RemoveContainer" containerID="7960da5be1683ebe72e4312f9b6be5953a6337b9dd10325cd4ffa8ae67227b54" Oct 03 18:01:30 crc kubenswrapper[5081]: E1003 18:01:30.929709 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:01:31 crc kubenswrapper[5081]: I1003 18:01:31.188599 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:31 crc kubenswrapper[5081]: I1003 18:01:31.239301 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:31 crc kubenswrapper[5081]: I1003 18:01:31.428072 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"] Oct 03 18:01:31 crc kubenswrapper[5081]: I1003 18:01:31.875874 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:01:31 crc kubenswrapper[5081]: E1003 18:01:31.877133 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:01:32 crc kubenswrapper[5081]: I1003 18:01:32.898443 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wmvkw" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="registry-server" containerID="cri-o://313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14" gracePeriod=2 Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.444420 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.502849 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities\") pod \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.503083 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhtbv\" (UniqueName: \"kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv\") pod \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.503169 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content\") pod \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\" (UID: \"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2\") " Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.509401 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv" (OuterVolumeSpecName: "kube-api-access-jhtbv") pod "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" (UID: "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2"). InnerVolumeSpecName "kube-api-access-jhtbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.519311 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities" (OuterVolumeSpecName: "utilities") pod "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" (UID: "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.546338 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" (UID: "a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.606167 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.606212 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.606224 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhtbv\" (UniqueName: \"kubernetes.io/projected/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2-kube-api-access-jhtbv\") on node \"crc\" DevicePath \"\"" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.914715 5081 generic.go:334] "Generic (PLEG): container finished" podID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerID="313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14" exitCode=0 Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.914761 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerDied","Data":"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14"} Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.914793 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmvkw" event={"ID":"a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2","Type":"ContainerDied","Data":"00a362707799c85ee5be9df944cc89d637460db4d1165b653479d9c07abe1ff4"} Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.914813 5081 scope.go:117] "RemoveContainer" containerID="313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.914949 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wmvkw" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.944173 5081 scope.go:117] "RemoveContainer" containerID="6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9" Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.950709 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"] Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.959629 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wmvkw"] Oct 03 18:01:33 crc kubenswrapper[5081]: I1003 18:01:33.965572 5081 scope.go:117] "RemoveContainer" containerID="5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.020695 5081 scope.go:117] "RemoveContainer" containerID="313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14" Oct 03 18:01:34 crc kubenswrapper[5081]: E1003 18:01:34.021076 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14\": container with ID starting with 313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14 not found: ID does not exist" containerID="313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.021119 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14"} err="failed to get container status \"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14\": rpc error: code = NotFound desc = could not find container \"313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14\": container with ID starting with 313f940ec4d5d18b7c1e5981b14f80b698f5c3b04f4d2234e1f3769ebe59fe14 not found: ID does not exist" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.021148 5081 scope.go:117] "RemoveContainer" containerID="6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9" Oct 03 18:01:34 crc kubenswrapper[5081]: E1003 18:01:34.021458 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9\": container with ID starting with 6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9 not found: ID does not exist" containerID="6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.021492 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9"} err="failed to get container status \"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9\": rpc error: code = NotFound desc = could not find container \"6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9\": container with ID starting with 6bce9bfb3b17410072bb5c1cf83e84e7eb694d6b908afaca2c6b5de54641b1c9 not found: ID does not exist" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.021515 5081 scope.go:117] "RemoveContainer" containerID="5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7" Oct 03 18:01:34 crc kubenswrapper[5081]: E1003 18:01:34.021868 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7\": container with ID starting with 5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7 not found: ID does not exist" containerID="5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7" Oct 03 18:01:34 crc kubenswrapper[5081]: I1003 18:01:34.021895 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7"} err="failed to get container status \"5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7\": rpc error: code = NotFound desc = could not find container \"5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7\": container with ID starting with 5d0a27386fb8033cc1bf6794829024ca6e2a6d6a7e9aa95e9f013885e2b343e7 not found: ID does not exist" Oct 03 18:01:35 crc kubenswrapper[5081]: I1003 18:01:35.844326 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" path="/var/lib/kubelet/pods/a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2/volumes" Oct 03 18:01:46 crc kubenswrapper[5081]: I1003 18:01:46.828259 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:01:46 crc kubenswrapper[5081]: E1003 18:01:46.829116 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:01:58 crc kubenswrapper[5081]: I1003 18:01:58.828078 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:01:58 crc kubenswrapper[5081]: E1003 18:01:58.828827 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:02:11 crc kubenswrapper[5081]: I1003 18:02:11.835415 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:02:11 crc kubenswrapper[5081]: E1003 18:02:11.836282 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:02:23 crc kubenswrapper[5081]: I1003 18:02:23.828353 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:02:23 crc kubenswrapper[5081]: E1003 18:02:23.829489 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:02:38 crc kubenswrapper[5081]: I1003 18:02:38.828907 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:02:38 crc kubenswrapper[5081]: E1003 18:02:38.829654 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:02:52 crc kubenswrapper[5081]: I1003 18:02:52.828644 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:02:52 crc kubenswrapper[5081]: E1003 18:02:52.830246 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:02:56 crc kubenswrapper[5081]: I1003 18:02:56.997146 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:02:56 crc kubenswrapper[5081]: E1003 18:02:56.999735 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="extract-utilities" Oct 03 18:02:56 crc kubenswrapper[5081]: I1003 18:02:56.999891 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="extract-utilities" Oct 03 18:02:57 crc kubenswrapper[5081]: E1003 18:02:57.000000 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="extract-utilities" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.000078 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="extract-utilities" Oct 03 18:02:57 crc kubenswrapper[5081]: E1003 18:02:57.000171 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="extract-content" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.000252 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="extract-content" Oct 03 18:02:57 crc kubenswrapper[5081]: E1003 18:02:57.000345 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="extract-content" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.000419 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="extract-content" Oct 03 18:02:57 crc kubenswrapper[5081]: E1003 18:02:57.000497 5081 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.000595 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: E1003 18:02:57.000702 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.000783 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.001334 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7d607f-d7f2-43da-9860-801a1f0166bb" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.001455 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="a16d58cc-dd2d-4a0d-8fd2-d5107b4db3f2" containerName="registry-server" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.003980 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.032353 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.197398 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsgvc\" (UniqueName: \"kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.197623 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.197686 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.298903 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.298978 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.299053 5081 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsgvc\" (UniqueName: \"kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.299602 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.299671 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.318144 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsgvc\" (UniqueName: \"kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc\") pod \"redhat-marketplace-brhlr\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.322039 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.792614 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:02:57 crc kubenswrapper[5081]: I1003 18:02:57.851107 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerStarted","Data":"535f82d68bdfd805dc664b57dfc0a72f329fd630acd8969cf444353b68cab3ef"} Oct 03 18:02:58 crc kubenswrapper[5081]: I1003 18:02:58.861813 5081 generic.go:334] "Generic (PLEG): container finished" podID="f436cc06-ade6-4a55-a098-53386dc212c7" containerID="3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f" exitCode=0 Oct 03 18:02:58 crc kubenswrapper[5081]: I1003 18:02:58.861892 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerDied","Data":"3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f"} Oct 03 18:03:00 crc kubenswrapper[5081]: I1003 18:03:00.889234 5081 generic.go:334] "Generic (PLEG): container finished" podID="f436cc06-ade6-4a55-a098-53386dc212c7" containerID="8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f" exitCode=0 Oct 03 18:03:00 crc kubenswrapper[5081]: I1003 18:03:00.889278 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerDied","Data":"8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f"} Oct 03 18:03:01 crc kubenswrapper[5081]: I1003 18:03:01.903664 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" 
event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerStarted","Data":"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed"} Oct 03 18:03:01 crc kubenswrapper[5081]: I1003 18:03:01.933650 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-brhlr" podStartSLOduration=3.332747615 podStartE2EDuration="5.933632159s" podCreationTimestamp="2025-10-03 18:02:56 +0000 UTC" firstStartedPulling="2025-10-03 18:02:58.864773504 +0000 UTC m=+9297.830330107" lastFinishedPulling="2025-10-03 18:03:01.465658048 +0000 UTC m=+9300.431214651" observedRunningTime="2025-10-03 18:03:01.923381466 +0000 UTC m=+9300.888938099" watchObservedRunningTime="2025-10-03 18:03:01.933632159 +0000 UTC m=+9300.899188772" Oct 03 18:03:07 crc kubenswrapper[5081]: I1003 18:03:07.322473 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:07 crc kubenswrapper[5081]: I1003 18:03:07.323453 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:07 crc kubenswrapper[5081]: I1003 18:03:07.828109 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:03:07 crc kubenswrapper[5081]: E1003 18:03:07.828431 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:03:08 crc kubenswrapper[5081]: I1003 18:03:08.147172 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:08 crc kubenswrapper[5081]: I1003 18:03:08.201413 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:08 crc kubenswrapper[5081]: I1003 18:03:08.395097 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:03:09 crc kubenswrapper[5081]: I1003 18:03:09.984455 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-brhlr" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="registry-server" containerID="cri-o://51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed" gracePeriod=2 Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.607253 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.662054 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content\") pod \"f436cc06-ade6-4a55-a098-53386dc212c7\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.662146 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsgvc\" (UniqueName: \"kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc\") pod \"f436cc06-ade6-4a55-a098-53386dc212c7\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.662193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities\") pod \"f436cc06-ade6-4a55-a098-53386dc212c7\" (UID: \"f436cc06-ade6-4a55-a098-53386dc212c7\") " Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.663083 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities" (OuterVolumeSpecName: "utilities") pod "f436cc06-ade6-4a55-a098-53386dc212c7" (UID: "f436cc06-ade6-4a55-a098-53386dc212c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.669496 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc" (OuterVolumeSpecName: "kube-api-access-jsgvc") pod "f436cc06-ade6-4a55-a098-53386dc212c7" (UID: "f436cc06-ade6-4a55-a098-53386dc212c7"). InnerVolumeSpecName "kube-api-access-jsgvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.677700 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f436cc06-ade6-4a55-a098-53386dc212c7" (UID: "f436cc06-ade6-4a55-a098-53386dc212c7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.765034 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.765071 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f436cc06-ade6-4a55-a098-53386dc212c7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:03:10 crc kubenswrapper[5081]: I1003 18:03:10.765082 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsgvc\" (UniqueName: \"kubernetes.io/projected/f436cc06-ade6-4a55-a098-53386dc212c7-kube-api-access-jsgvc\") on node \"crc\" DevicePath \"\"" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.010003 5081 generic.go:334] "Generic (PLEG): container finished" podID="f436cc06-ade6-4a55-a098-53386dc212c7" containerID="51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed" exitCode=0 Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.010050 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerDied","Data":"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed"} Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.010075 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brhlr" event={"ID":"f436cc06-ade6-4a55-a098-53386dc212c7","Type":"ContainerDied","Data":"535f82d68bdfd805dc664b57dfc0a72f329fd630acd8969cf444353b68cab3ef"} Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.010092 5081 scope.go:117] "RemoveContainer" containerID="51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.010233 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brhlr" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.049081 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.056077 5081 scope.go:117] "RemoveContainer" containerID="8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.057950 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-brhlr"] Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.076885 5081 scope.go:117] "RemoveContainer" containerID="3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.136108 5081 scope.go:117] "RemoveContainer" containerID="51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed" Oct 03 18:03:11 crc kubenswrapper[5081]: E1003 18:03:11.137029 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed\": container with ID starting with 51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed not found: ID does not exist" containerID="51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.137066 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed"} err="failed to get container status \"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed\": rpc error: code = NotFound desc = could not find container \"51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed\": container with ID starting with 51d900b0603a5787fab9162f2b2193872ff689c498bb4b4ebf16e7b592a5bfed not found: ID does not exist" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.137120 5081 scope.go:117] "RemoveContainer" containerID="8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f" Oct 03 18:03:11 crc kubenswrapper[5081]: E1003 18:03:11.137469 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f\": container with ID starting with 8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f not found: ID does not exist" containerID="8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.137509 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f"} err="failed to get container status \"8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f\": rpc error: code = NotFound desc = could not find container \"8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f\": container with ID starting with 8b39fb68b14105f4a4815efe5800cea54a2594dee474e7875a3ecfab7fed801f not found: ID does not exist" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.137538 5081 scope.go:117] "RemoveContainer" containerID="3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f" Oct 03 18:03:11 crc kubenswrapper[5081]: E1003 18:03:11.138339 5081 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f\": container with ID starting with 3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f not found: ID does not exist" containerID="3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.138360 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f"} err="failed to get container status \"3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f\": rpc error: code = NotFound desc = could not find container \"3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f\": container with ID starting with 3667b477edee6cf7f9021bea997245f07b4575168d30375d0a2cfe70a0a46c4f not found: ID does not exist" Oct 03 18:03:11 crc kubenswrapper[5081]: I1003 18:03:11.841450 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" path="/var/lib/kubelet/pods/f436cc06-ade6-4a55-a098-53386dc212c7/volumes" Oct 03 18:03:22 crc kubenswrapper[5081]: I1003 18:03:22.827666 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:03:22 crc kubenswrapper[5081]: E1003 18:03:22.828510 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:03:33 crc kubenswrapper[5081]: I1003 18:03:33.832903 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:03:33 crc kubenswrapper[5081]: E1003 18:03:33.834214 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:03:48 crc kubenswrapper[5081]: I1003 18:03:48.828347 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:03:48 crc kubenswrapper[5081]: E1003 18:03:48.829152 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:04:02 crc kubenswrapper[5081]: I1003 18:04:02.827629 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:04:02 crc kubenswrapper[5081]: E1003 18:04:02.828283 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:04:15 crc kubenswrapper[5081]: I1003 18:04:15.829770 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:04:15 crc kubenswrapper[5081]: E1003 18:04:15.830643 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:04:30 crc kubenswrapper[5081]: I1003 18:04:30.828040 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:04:30 crc kubenswrapper[5081]: E1003 18:04:30.829043 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:04:41 crc kubenswrapper[5081]: I1003 18:04:41.837644 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:04:41 crc kubenswrapper[5081]: E1003 18:04:41.838420 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:04:54 crc kubenswrapper[5081]: I1003 18:04:54.828491 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:04:54 crc kubenswrapper[5081]: E1003 18:04:54.829434 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:05:08 crc kubenswrapper[5081]: I1003 18:05:08.828022 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:05:08 crc kubenswrapper[5081]: E1003 18:05:08.828865 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:05:19 crc kubenswrapper[5081]: I1003 18:05:19.827939 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:05:19 crc kubenswrapper[5081]: E1003 18:05:19.830627 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:05:34 crc kubenswrapper[5081]: I1003 18:05:34.827768 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:05:34 crc kubenswrapper[5081]: E1003 18:05:34.828632 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:05:49 crc kubenswrapper[5081]: I1003 18:05:49.827758 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:05:49 crc kubenswrapper[5081]: E1003 18:05:49.828608 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:06:01 crc kubenswrapper[5081]: I1003 18:06:01.841093 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:06:01 crc kubenswrapper[5081]: E1003 18:06:01.841869 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:06:15 crc kubenswrapper[5081]: I1003 18:06:15.828344 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:06:15 crc kubenswrapper[5081]: E1003 18:06:15.829936 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" 
podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:06:28 crc kubenswrapper[5081]: I1003 18:06:28.828084 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:06:28 crc kubenswrapper[5081]: E1003 18:06:28.828889 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:06:39 crc kubenswrapper[5081]: I1003 18:06:39.829076 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:06:40 crc kubenswrapper[5081]: I1003 18:06:40.292187 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e"} Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.988156 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:20 crc kubenswrapper[5081]: E1003 18:07:20.989471 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="registry-server" Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.989495 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="registry-server" Oct 03 18:07:20 crc kubenswrapper[5081]: E1003 18:07:20.989531 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="extract-content" Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.989542 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="extract-content" Oct 03 18:07:20 crc kubenswrapper[5081]: E1003 18:07:20.989610 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="extract-utilities" Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.989624 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="extract-utilities" Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.989985 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="f436cc06-ade6-4a55-a098-53386dc212c7" containerName="registry-server" Oct 03 18:07:20 crc kubenswrapper[5081]: I1003 18:07:20.992606 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.003223 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.156199 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwltz\" (UniqueName: \"kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.156307 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.156486 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.258536 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwltz\" (UniqueName: \"kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.258635 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.258785 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.259378 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.259396 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.281560 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rwltz\" (UniqueName: \"kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz\") pod \"redhat-operators-n8v9c\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.375288 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:21 crc kubenswrapper[5081]: I1003 18:07:21.893402 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:22 crc kubenswrapper[5081]: I1003 18:07:22.785904 5081 generic.go:334] "Generic (PLEG): container finished" podID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerID="5580c959112dc2bfda0130adfe108bc60d7f0330850f15d8831e865cdaf9cf47" exitCode=0 Oct 03 18:07:22 crc kubenswrapper[5081]: I1003 18:07:22.786091 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerDied","Data":"5580c959112dc2bfda0130adfe108bc60d7f0330850f15d8831e865cdaf9cf47"} Oct 03 18:07:22 crc kubenswrapper[5081]: I1003 18:07:22.786479 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerStarted","Data":"db627937d5d1a3708908dca66fdc12d58db94fb2331032b0e75be9d6ea4d721c"} Oct 03 18:07:22 crc kubenswrapper[5081]: I1003 18:07:22.788443 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 18:07:24 crc kubenswrapper[5081]: I1003 18:07:24.806523 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerStarted","Data":"5c048f016d4ab778895ac5ad55cb569550b2399272abf62f0040df45fbaf8300"} Oct 03 18:07:27 crc kubenswrapper[5081]: I1003 18:07:27.834688 5081 generic.go:334] "Generic (PLEG): container finished" podID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerID="5c048f016d4ab778895ac5ad55cb569550b2399272abf62f0040df45fbaf8300" exitCode=0 Oct 03 18:07:27 crc kubenswrapper[5081]: I1003 18:07:27.838248 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerDied","Data":"5c048f016d4ab778895ac5ad55cb569550b2399272abf62f0040df45fbaf8300"} Oct 03 18:07:28 crc kubenswrapper[5081]: I1003 18:07:28.845633 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerStarted","Data":"fcd45e533aa11e0dc203f69513b8fd3429258863d5d9f5cb711d322ab9b5d40b"} Oct 03 18:07:28 crc kubenswrapper[5081]: I1003 18:07:28.880794 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n8v9c" podStartSLOduration=3.401733407 podStartE2EDuration="8.880752735s" podCreationTimestamp="2025-10-03 18:07:20 +0000 UTC" firstStartedPulling="2025-10-03 18:07:22.788150594 +0000 UTC m=+9561.753707207" lastFinishedPulling="2025-10-03 18:07:28.267169922 +0000 UTC m=+9567.232726535" observedRunningTime="2025-10-03 18:07:28.872340325 +0000 UTC m=+9567.837896958" watchObservedRunningTime="2025-10-03 18:07:28.880752735 +0000 UTC m=+9567.846309348" Oct 03 18:07:31 crc 
kubenswrapper[5081]: I1003 18:07:31.376362 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:31 crc kubenswrapper[5081]: I1003 18:07:31.376939 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:32 crc kubenswrapper[5081]: I1003 18:07:32.441061 5081 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n8v9c" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="registry-server" probeResult="failure" output=< Oct 03 18:07:32 crc kubenswrapper[5081]: timeout: failed to connect service ":50051" within 1s Oct 03 18:07:32 crc kubenswrapper[5081]: > Oct 03 18:07:41 crc kubenswrapper[5081]: I1003 18:07:41.453370 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:41 crc kubenswrapper[5081]: I1003 18:07:41.534470 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:41 crc kubenswrapper[5081]: I1003 18:07:41.694754 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:43 crc kubenswrapper[5081]: I1003 18:07:43.001206 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n8v9c" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="registry-server" containerID="cri-o://fcd45e533aa11e0dc203f69513b8fd3429258863d5d9f5cb711d322ab9b5d40b" gracePeriod=2 Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.012547 5081 generic.go:334] "Generic (PLEG): container finished" podID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerID="fcd45e533aa11e0dc203f69513b8fd3429258863d5d9f5cb711d322ab9b5d40b" exitCode=0 Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.012832 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerDied","Data":"fcd45e533aa11e0dc203f69513b8fd3429258863d5d9f5cb711d322ab9b5d40b"} Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.012857 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8v9c" event={"ID":"eea31807-4533-4b83-a4f3-9b94f9d3327b","Type":"ContainerDied","Data":"db627937d5d1a3708908dca66fdc12d58db94fb2331032b0e75be9d6ea4d721c"} Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.012884 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db627937d5d1a3708908dca66fdc12d58db94fb2331032b0e75be9d6ea4d721c" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.018885 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.181645 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities\") pod \"eea31807-4533-4b83-a4f3-9b94f9d3327b\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.182600 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwltz\" (UniqueName: \"kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz\") pod \"eea31807-4533-4b83-a4f3-9b94f9d3327b\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.183477 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content\") pod \"eea31807-4533-4b83-a4f3-9b94f9d3327b\" (UID: \"eea31807-4533-4b83-a4f3-9b94f9d3327b\") " Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.184637 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities" (OuterVolumeSpecName: "utilities") pod "eea31807-4533-4b83-a4f3-9b94f9d3327b" (UID: "eea31807-4533-4b83-a4f3-9b94f9d3327b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.192255 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz" (OuterVolumeSpecName: "kube-api-access-rwltz") pod "eea31807-4533-4b83-a4f3-9b94f9d3327b" (UID: "eea31807-4533-4b83-a4f3-9b94f9d3327b"). InnerVolumeSpecName "kube-api-access-rwltz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.280766 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eea31807-4533-4b83-a4f3-9b94f9d3327b" (UID: "eea31807-4533-4b83-a4f3-9b94f9d3327b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.286410 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.286459 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eea31807-4533-4b83-a4f3-9b94f9d3327b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:07:44 crc kubenswrapper[5081]: I1003 18:07:44.286471 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwltz\" (UniqueName: \"kubernetes.io/projected/eea31807-4533-4b83-a4f3-9b94f9d3327b-kube-api-access-rwltz\") on node \"crc\" DevicePath \"\"" Oct 03 18:07:45 crc kubenswrapper[5081]: I1003 18:07:45.021680 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8v9c" Oct 03 18:07:45 crc kubenswrapper[5081]: I1003 18:07:45.070300 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:45 crc kubenswrapper[5081]: I1003 18:07:45.079612 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n8v9c"] Oct 03 18:07:45 crc kubenswrapper[5081]: I1003 18:07:45.850339 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" path="/var/lib/kubelet/pods/eea31807-4533-4b83-a4f3-9b94f9d3327b/volumes" Oct 03 18:09:00 crc kubenswrapper[5081]: I1003 18:09:00.648620 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:09:00 crc kubenswrapper[5081]: I1003 18:09:00.649629 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:09:30 crc kubenswrapper[5081]: I1003 18:09:30.648121 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:09:30 crc kubenswrapper[5081]: I1003 18:09:30.648741 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:10:00 crc kubenswrapper[5081]: I1003 18:10:00.647691 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:10:00 crc kubenswrapper[5081]: I1003 18:10:00.648306 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:10:00 crc kubenswrapper[5081]: I1003 18:10:00.648355 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 18:10:00 crc kubenswrapper[5081]: I1003 18:10:00.649303 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 
18:10:00 crc kubenswrapper[5081]: I1003 18:10:00.649359 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e" gracePeriod=600 Oct 03 18:10:01 crc kubenswrapper[5081]: I1003 18:10:01.432534 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e" exitCode=0 Oct 03 18:10:01 crc kubenswrapper[5081]: I1003 18:10:01.432597 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e"} Oct 03 18:10:01 crc kubenswrapper[5081]: I1003 18:10:01.433112 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"} Oct 03 18:10:01 crc kubenswrapper[5081]: I1003 18:10:01.433129 5081 scope.go:117] "RemoveContainer" containerID="e294c524a2bdda5a2f62c3c9aac85782bc3d6d393f93fbe15710b7f284cdc401" Oct 03 18:10:21 crc kubenswrapper[5081]: I1003 18:10:21.716237 5081 generic.go:334] "Generic (PLEG): container finished" podID="8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" containerID="d59021c10c703b9e672ab808683bcb1aa9a1fd1e474185f1a46d80093767bf80" exitCode=0 Oct 03 18:10:21 crc kubenswrapper[5081]: I1003 18:10:21.716350 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" event={"ID":"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c","Type":"ContainerDied","Data":"d59021c10c703b9e672ab808683bcb1aa9a1fd1e474185f1a46d80093767bf80"} Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.190190 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.319916 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320292 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320409 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320442 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320494 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320532 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320629 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5m4r\" (UniqueName: \"kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320701 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320734 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320797 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.320894 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory\") pod \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\" (UID: \"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c\") " Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.338843 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.340530 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph" (OuterVolumeSpecName: "ceph") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.342878 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r" (OuterVolumeSpecName: "kube-api-access-d5m4r") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "kube-api-access-d5m4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.351816 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.354552 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.355714 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.355850 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.357476 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.357919 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.360093 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory" (OuterVolumeSpecName: "inventory") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.360311 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" (UID: "8cf6daab-c22a-4cd4-8d88-64a2bf39e05c"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424273 5081 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424323 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424340 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424352 5081 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424365 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424376 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424389 5081 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424402 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5m4r\" (UniqueName: \"kubernetes.io/projected/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-kube-api-access-d5m4r\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424414 5081 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-ceph\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424423 5081 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.424435 5081 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8cf6daab-c22a-4cd4-8d88-64a2bf39e05c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.737544 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" event={"ID":"8cf6daab-c22a-4cd4-8d88-64a2bf39e05c","Type":"ContainerDied","Data":"31580c26fa6d9640ac012838673426bd61c9de76acc106d3bd4b3c3f70d060e2"} Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.737828 5081 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31580c26fa6d9640ac012838673426bd61c9de76acc106d3bd4b3c3f70d060e2" Oct 03 18:10:23 crc kubenswrapper[5081]: I1003 18:10:23.737720 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.254581 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:11:43 crc kubenswrapper[5081]: E1003 18:11:43.255670 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="registry-server" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255684 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="registry-server" Oct 03 18:11:43 crc kubenswrapper[5081]: E1003 18:11:43.255703 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="extract-content" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255711 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="extract-content" Oct 03 18:11:43 crc kubenswrapper[5081]: E1003 18:11:43.255732 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255739 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 03 18:11:43 crc kubenswrapper[5081]: E1003 18:11:43.255769 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="extract-utilities" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255776 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="extract-utilities" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255970 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf6daab-c22a-4cd4-8d88-64a2bf39e05c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.255988 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea31807-4533-4b83-a4f3-9b94f9d3327b" containerName="registry-server" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.257523 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.278950 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.370605 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.370651 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.370743 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s45w\" (UniqueName: \"kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.473810 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s45w\" (UniqueName: \"kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.474029 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.474068 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.474800 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.475205 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.495229 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7s45w\" (UniqueName: \"kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w\") pod \"certified-operators-kn72k\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:43 crc kubenswrapper[5081]: I1003 18:11:43.583774 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:44 crc kubenswrapper[5081]: I1003 18:11:44.246506 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:11:44 crc kubenswrapper[5081]: I1003 18:11:44.592915 5081 generic.go:334] "Generic (PLEG): container finished" podID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerID="31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7" exitCode=0 Oct 03 18:11:44 crc kubenswrapper[5081]: I1003 18:11:44.593063 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerDied","Data":"31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7"} Oct 03 18:11:44 crc kubenswrapper[5081]: I1003 18:11:44.595157 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerStarted","Data":"327a7fa59322dced6438473b54f770d85fe42f81f9e129d064476b44928bc11f"} Oct 03 18:11:47 crc kubenswrapper[5081]: I1003 18:11:47.633316 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerStarted","Data":"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c"} Oct 03 18:11:48 crc kubenswrapper[5081]: I1003 18:11:48.645764 5081 generic.go:334] "Generic (PLEG): container finished" podID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerID="9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c" exitCode=0 Oct 03 18:11:48 crc kubenswrapper[5081]: I1003 18:11:48.645847 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerDied","Data":"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c"} Oct 03 18:11:49 crc kubenswrapper[5081]: I1003 18:11:49.662655 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerStarted","Data":"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd"} Oct 03 18:11:49 crc kubenswrapper[5081]: I1003 18:11:49.686450 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kn72k" podStartSLOduration=2.208013343 podStartE2EDuration="6.686428714s" podCreationTimestamp="2025-10-03 18:11:43 +0000 UTC" firstStartedPulling="2025-10-03 18:11:44.596828809 +0000 UTC m=+9823.562385442" lastFinishedPulling="2025-10-03 18:11:49.07524419 +0000 UTC m=+9828.040800813" observedRunningTime="2025-10-03 18:11:49.681197065 +0000 UTC m=+9828.646753678" watchObservedRunningTime="2025-10-03 18:11:49.686428714 +0000 UTC m=+9828.651985327" Oct 03 18:11:53 crc kubenswrapper[5081]: I1003 18:11:53.584003 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:53 crc kubenswrapper[5081]: I1003 18:11:53.584587 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:53 crc kubenswrapper[5081]: I1003 18:11:53.635703 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.639008 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.642453 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.662088 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.687530 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcjcp\" (UniqueName: \"kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.688008 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.688062 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.794844 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcjcp\" (UniqueName: \"kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.795087 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.795124 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.795744 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:57 crc kubenswrapper[5081]: I1003 18:11:57.796661 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:58 crc kubenswrapper[5081]: I1003 18:11:58.202871 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcjcp\" (UniqueName: \"kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp\") pod \"community-operators-n8qms\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:58 crc kubenswrapper[5081]: I1003 18:11:58.296638 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:11:58 crc kubenswrapper[5081]: I1003 18:11:58.815960 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:11:59 crc kubenswrapper[5081]: I1003 18:11:59.794344 5081 generic.go:334] "Generic (PLEG): container finished" podID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerID="86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50" exitCode=0 Oct 03 18:11:59 crc kubenswrapper[5081]: I1003 18:11:59.794424 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerDied","Data":"86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50"} Oct 03 18:11:59 crc kubenswrapper[5081]: I1003 18:11:59.794660 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerStarted","Data":"8db17766f8a112795f3af4580b974dfeaadeffd5640f5f025c6655fdcf4e40f4"} Oct 03 18:12:01 crc kubenswrapper[5081]: I1003 18:12:01.875296 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerStarted","Data":"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce"} Oct 03 18:12:02 crc kubenswrapper[5081]: I1003 18:12:02.865126 5081 generic.go:334] "Generic (PLEG): container finished" podID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerID="659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce" exitCode=0 Oct 03 18:12:02 crc kubenswrapper[5081]: I1003 18:12:02.865249 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerDied","Data":"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce"} Oct 03 18:12:03 crc kubenswrapper[5081]: I1003 18:12:03.643762 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:12:03 crc kubenswrapper[5081]: I1003 18:12:03.892996 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerStarted","Data":"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7"} Oct 03 18:12:03 crc kubenswrapper[5081]: I1003 18:12:03.921438 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n8qms" podStartSLOduration=3.305470087 podStartE2EDuration="6.921418561s" podCreationTimestamp="2025-10-03 18:11:57 +0000 UTC" firstStartedPulling="2025-10-03 18:11:59.797840438 +0000 UTC m=+9838.763397081" lastFinishedPulling="2025-10-03 18:12:03.413788942 +0000 UTC m=+9842.379345555" observedRunningTime="2025-10-03 18:12:03.91719971 +0000 UTC m=+9842.882756333" watchObservedRunningTime="2025-10-03 18:12:03.921418561 +0000 UTC m=+9842.886975194" Oct 03 18:12:03 crc kubenswrapper[5081]: I1003 18:12:03.995348 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:12:03 crc kubenswrapper[5081]: I1003 18:12:03.995556 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kn72k" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="registry-server" containerID="cri-o://ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd" gracePeriod=2 Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.526830 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.546161 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7s45w\" (UniqueName: \"kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w\") pod \"cb451c9b-8243-493b-9af8-1a5d0ce70470\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.546277 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities\") pod \"cb451c9b-8243-493b-9af8-1a5d0ce70470\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.546459 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content\") pod \"cb451c9b-8243-493b-9af8-1a5d0ce70470\" (UID: \"cb451c9b-8243-493b-9af8-1a5d0ce70470\") " Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.547212 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities" (OuterVolumeSpecName: "utilities") pod "cb451c9b-8243-493b-9af8-1a5d0ce70470" (UID: "cb451c9b-8243-493b-9af8-1a5d0ce70470"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.552793 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w" (OuterVolumeSpecName: "kube-api-access-7s45w") pod "cb451c9b-8243-493b-9af8-1a5d0ce70470" (UID: "cb451c9b-8243-493b-9af8-1a5d0ce70470"). InnerVolumeSpecName "kube-api-access-7s45w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.604639 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb451c9b-8243-493b-9af8-1a5d0ce70470" (UID: "cb451c9b-8243-493b-9af8-1a5d0ce70470"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.649294 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.649322 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7s45w\" (UniqueName: \"kubernetes.io/projected/cb451c9b-8243-493b-9af8-1a5d0ce70470-kube-api-access-7s45w\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.649331 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb451c9b-8243-493b-9af8-1a5d0ce70470-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.907531 5081 generic.go:334] "Generic (PLEG): container finished" podID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerID="ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd" exitCode=0 Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.907587 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerDied","Data":"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd"} Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.907613 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kn72k" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.907634 5081 scope.go:117] "RemoveContainer" containerID="ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.907622 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kn72k" event={"ID":"cb451c9b-8243-493b-9af8-1a5d0ce70470","Type":"ContainerDied","Data":"327a7fa59322dced6438473b54f770d85fe42f81f9e129d064476b44928bc11f"} Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.944292 5081 scope.go:117] "RemoveContainer" containerID="9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c" Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.953955 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.966519 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kn72k"] Oct 03 18:12:04 crc kubenswrapper[5081]: I1003 18:12:04.980661 5081 scope.go:117] "RemoveContainer" containerID="31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.010678 5081 scope.go:117] "RemoveContainer" containerID="ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd" Oct 03 18:12:05 crc kubenswrapper[5081]: E1003 18:12:05.011184 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd\": container with ID starting with ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd not found: ID does not exist" containerID="ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.011208 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd"} err="failed to get container status \"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd\": rpc error: code = NotFound desc = could not find container \"ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd\": container with ID starting with ae34624dcf54558f5db11102e03635329d467adafab2cba681ac8df4e00486bd not found: ID does not exist" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.011228 5081 scope.go:117] "RemoveContainer" containerID="9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c" Oct 03 18:12:05 crc kubenswrapper[5081]: E1003 18:12:05.011672 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c\": container with ID starting with 9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c not found: ID does not exist" containerID="9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.011689 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c"} err="failed to get container status \"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c\": rpc error: code = NotFound desc = could not find 
container \"9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c\": container with ID starting with 9ac773a7cce6606a4c84baecd6d419dd5cae6ba3c7b6b7406868d42429d88c5c not found: ID does not exist" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.011701 5081 scope.go:117] "RemoveContainer" containerID="31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7" Oct 03 18:12:05 crc kubenswrapper[5081]: E1003 18:12:05.012019 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7\": container with ID starting with 31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7 not found: ID does not exist" containerID="31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.012035 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7"} err="failed to get container status \"31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7\": rpc error: code = NotFound desc = could not find container \"31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7\": container with ID starting with 31df46bece734c239c36ff5e564204b67be7bdd56216cfd2d82bcb81cf567ba7 not found: ID does not exist" Oct 03 18:12:05 crc kubenswrapper[5081]: I1003 18:12:05.848983 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" path="/var/lib/kubelet/pods/cb451c9b-8243-493b-9af8-1a5d0ce70470/volumes" Oct 03 18:12:08 crc kubenswrapper[5081]: I1003 18:12:08.297796 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:08 crc kubenswrapper[5081]: I1003 18:12:08.298627 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:08 crc kubenswrapper[5081]: I1003 18:12:08.348087 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:09 crc kubenswrapper[5081]: I1003 18:12:09.007492 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:09 crc kubenswrapper[5081]: I1003 18:12:09.195309 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:12:10 crc kubenswrapper[5081]: I1003 18:12:10.981304 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n8qms" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="registry-server" containerID="cri-o://93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7" gracePeriod=2 Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.514422 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.607357 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content\") pod \"065c7e94-5237-4aa8-9df0-60b9e3492800\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.607455 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcjcp\" (UniqueName: \"kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp\") pod \"065c7e94-5237-4aa8-9df0-60b9e3492800\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.607604 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities\") pod \"065c7e94-5237-4aa8-9df0-60b9e3492800\" (UID: \"065c7e94-5237-4aa8-9df0-60b9e3492800\") " Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.608759 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities" (OuterVolumeSpecName: "utilities") pod "065c7e94-5237-4aa8-9df0-60b9e3492800" (UID: "065c7e94-5237-4aa8-9df0-60b9e3492800"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.613445 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp" (OuterVolumeSpecName: "kube-api-access-rcjcp") pod "065c7e94-5237-4aa8-9df0-60b9e3492800" (UID: "065c7e94-5237-4aa8-9df0-60b9e3492800"). InnerVolumeSpecName "kube-api-access-rcjcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.661717 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "065c7e94-5237-4aa8-9df0-60b9e3492800" (UID: "065c7e94-5237-4aa8-9df0-60b9e3492800"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.709788 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcjcp\" (UniqueName: \"kubernetes.io/projected/065c7e94-5237-4aa8-9df0-60b9e3492800-kube-api-access-rcjcp\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.709826 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.709837 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/065c7e94-5237-4aa8-9df0-60b9e3492800-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.993732 5081 generic.go:334] "Generic (PLEG): container finished" podID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerID="93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7" exitCode=0 Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.993787 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerDied","Data":"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7"} Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.993818 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8qms" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.993843 5081 scope.go:117] "RemoveContainer" containerID="93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7" Oct 03 18:12:11 crc kubenswrapper[5081]: I1003 18:12:11.993826 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8qms" event={"ID":"065c7e94-5237-4aa8-9df0-60b9e3492800","Type":"ContainerDied","Data":"8db17766f8a112795f3af4580b974dfeaadeffd5640f5f025c6655fdcf4e40f4"} Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.019740 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.025930 5081 scope.go:117] "RemoveContainer" containerID="659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.029710 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n8qms"] Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.068948 5081 scope.go:117] "RemoveContainer" containerID="86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.106080 5081 scope.go:117] "RemoveContainer" containerID="93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7" Oct 03 18:12:12 crc kubenswrapper[5081]: E1003 18:12:12.107158 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7\": container with ID starting with 93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7 not found: ID does not exist" containerID="93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.107209 
5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7"} err="failed to get container status \"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7\": rpc error: code = NotFound desc = could not find container \"93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7\": container with ID starting with 93fffc75960986e978f2603ebdbd1c6fd0b6deead52bedebe7dc5824ac0cb1e7 not found: ID does not exist" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.107239 5081 scope.go:117] "RemoveContainer" containerID="659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce" Oct 03 18:12:12 crc kubenswrapper[5081]: E1003 18:12:12.107859 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce\": container with ID starting with 659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce not found: ID does not exist" containerID="659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.108030 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce"} err="failed to get container status \"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce\": rpc error: code = NotFound desc = could not find container \"659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce\": container with ID starting with 659884d82181e51d72611c9cc23de031e94d6f9919f637d6a16f910a34df92ce not found: ID does not exist" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.108084 5081 scope.go:117] "RemoveContainer" containerID="86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50" Oct 03 18:12:12 crc kubenswrapper[5081]: E1003 18:12:12.108506 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50\": container with ID starting with 86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50 not found: ID does not exist" containerID="86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50" Oct 03 18:12:12 crc kubenswrapper[5081]: I1003 18:12:12.108578 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50"} err="failed to get container status \"86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50\": rpc error: code = NotFound desc = could not find container \"86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50\": container with ID starting with 86dfd2292d484460aa36e760431739295857757bb8ae2e51dd986a0c8a15cf50 not found: ID does not exist" Oct 03 18:12:13 crc kubenswrapper[5081]: I1003 18:12:13.839610 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" path="/var/lib/kubelet/pods/065c7e94-5237-4aa8-9df0-60b9e3492800/volumes" Oct 03 18:12:30 crc kubenswrapper[5081]: I1003 18:12:30.647443 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:12:30 crc kubenswrapper[5081]: I1003 18:12:30.647940 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:12:36 crc kubenswrapper[5081]: I1003 18:12:36.847195 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Oct 03 18:12:36 crc kubenswrapper[5081]: I1003 18:12:36.847966 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="21ae62aa-1b0a-4995-9562-54b29b558240" containerName="adoption" containerID="cri-o://55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d" gracePeriod=30 Oct 03 18:13:00 crc kubenswrapper[5081]: I1003 18:13:00.646996 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:13:00 crc kubenswrapper[5081]: I1003 18:13:00.647519 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.392199 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.519925 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") pod \"21ae62aa-1b0a-4995-9562-54b29b558240\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.520450 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfm7p\" (UniqueName: \"kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p\") pod \"21ae62aa-1b0a-4995-9562-54b29b558240\" (UID: \"21ae62aa-1b0a-4995-9562-54b29b558240\") " Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.528829 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p" (OuterVolumeSpecName: "kube-api-access-lfm7p") pod "21ae62aa-1b0a-4995-9562-54b29b558240" (UID: "21ae62aa-1b0a-4995-9562-54b29b558240"). InnerVolumeSpecName "kube-api-access-lfm7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.534930 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d" (OuterVolumeSpecName: "mariadb-data") pod "21ae62aa-1b0a-4995-9562-54b29b558240" (UID: "21ae62aa-1b0a-4995-9562-54b29b558240"). InnerVolumeSpecName "pvc-718c963e-fccf-4bc5-9279-683879aa763d". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.567017 5081 generic.go:334] "Generic (PLEG): container finished" podID="21ae62aa-1b0a-4995-9562-54b29b558240" containerID="55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d" exitCode=137 Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.567063 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"21ae62aa-1b0a-4995-9562-54b29b558240","Type":"ContainerDied","Data":"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d"} Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.567080 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.567104 5081 scope.go:117] "RemoveContainer" containerID="55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.567094 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"21ae62aa-1b0a-4995-9562-54b29b558240","Type":"ContainerDied","Data":"41d229f25761f383ca00bfec94fe4928e86638891e89c79489a5e9aaed3ca8bb"} Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.623269 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfm7p\" (UniqueName: \"kubernetes.io/projected/21ae62aa-1b0a-4995-9562-54b29b558240-kube-api-access-lfm7p\") on node \"crc\" DevicePath \"\"" Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.623317 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") on node \"crc\" " Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.661982 5081 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.662107 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.662158 5081 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-718c963e-fccf-4bc5-9279-683879aa763d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d") on node "crc"
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.670930 5081 scope.go:117] "RemoveContainer" containerID="55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d"
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.671195 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 03 18:13:07 crc kubenswrapper[5081]: E1003 18:13:07.671393 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d\": container with ID starting with 55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d not found: ID does not exist" containerID="55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d"
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.671447 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d"} err="failed to get container status \"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d\": rpc error: code = NotFound desc = could not find container \"55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d\": container with ID starting with 55a2fef7e8658daf288e98c4e7e4536630e996e2a1261997cdd3440cb7c0128d not found: ID does not exist"
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.725216 5081 reconciler_common.go:293] "Volume detached for volume \"pvc-718c963e-fccf-4bc5-9279-683879aa763d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-718c963e-fccf-4bc5-9279-683879aa763d\") on node \"crc\" DevicePath \"\""
Oct 03 18:13:07 crc kubenswrapper[5081]: I1003 18:13:07.839054 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21ae62aa-1b0a-4995-9562-54b29b558240" path="/var/lib/kubelet/pods/21ae62aa-1b0a-4995-9562-54b29b558240/volumes"
Oct 03 18:13:08 crc kubenswrapper[5081]: I1003 18:13:08.231901 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 18:13:08 crc kubenswrapper[5081]: I1003 18:13:08.232381 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="bbd48186-ddc4-4dde-a445-a701bf8e3631" containerName="adoption" containerID="cri-o://5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a" gracePeriod=30
Oct 03 18:13:29 crc kubenswrapper[5081]: I1003 18:13:29.524264 5081 scope.go:117] "RemoveContainer" containerID="5c048f016d4ab778895ac5ad55cb569550b2399272abf62f0040df45fbaf8300"
Oct 03 18:13:29 crc kubenswrapper[5081]: I1003 18:13:29.547160 5081 scope.go:117] "RemoveContainer" containerID="fcd45e533aa11e0dc203f69513b8fd3429258863d5d9f5cb711d322ab9b5d40b"
Oct 03 18:13:29 crc kubenswrapper[5081]: I1003 18:13:29.589023 5081 scope.go:117] "RemoveContainer" containerID="5580c959112dc2bfda0130adfe108bc60d7f0330850f15d8831e865cdaf9cf47"
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.648016 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.648364 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.648407 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79"
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.649164 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.649219 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" gracePeriod=600
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.798015 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" exitCode=0
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.798125 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"}
Oct 03 18:13:30 crc kubenswrapper[5081]: I1003 18:13:30.798396 5081 scope.go:117] "RemoveContainer" containerID="b8e7d9adfb786987c1bb81783b01b16de5c84deb8a62eab39c30d68b9ffa679e"
Oct 03 18:13:30 crc kubenswrapper[5081]: E1003 18:13:30.823889 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:13:31 crc kubenswrapper[5081]: I1003 18:13:31.811190 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"
Oct 03 18:13:31 crc kubenswrapper[5081]: E1003 18:13:31.811860 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.738707 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.891554 5081 generic.go:334] "Generic (PLEG): container finished" podID="bbd48186-ddc4-4dde-a445-a701bf8e3631" containerID="5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a" exitCode=137
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.891665 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"bbd48186-ddc4-4dde-a445-a701bf8e3631","Type":"ContainerDied","Data":"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"}
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.891984 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"bbd48186-ddc4-4dde-a445-a701bf8e3631","Type":"ContainerDied","Data":"84c87876424e7eaadaa0c15f7d37d4fe3d1463761b537a99b848918feda47acf"}
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.892009 5081 scope.go:117] "RemoveContainer" containerID="5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.891682 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.905356 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") pod \"bbd48186-ddc4-4dde-a445-a701bf8e3631\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") "
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.905667 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert\") pod \"bbd48186-ddc4-4dde-a445-a701bf8e3631\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") "
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.906637 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptdx6\" (UniqueName: \"kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6\") pod \"bbd48186-ddc4-4dde-a445-a701bf8e3631\" (UID: \"bbd48186-ddc4-4dde-a445-a701bf8e3631\") "
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.913971 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6" (OuterVolumeSpecName: "kube-api-access-ptdx6") pod "bbd48186-ddc4-4dde-a445-a701bf8e3631" (UID: "bbd48186-ddc4-4dde-a445-a701bf8e3631"). InnerVolumeSpecName "kube-api-access-ptdx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.917974 5081 scope.go:117] "RemoveContainer" containerID="5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"
Oct 03 18:13:38 crc kubenswrapper[5081]: E1003 18:13:38.920138 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a\": container with ID starting with 5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a not found: ID does not exist" containerID="5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.920196 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a"} err="failed to get container status \"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a\": rpc error: code = NotFound desc = could not find container \"5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a\": container with ID starting with 5a0394bd026a05ce4bf65f8c46d38cb0d0cf74d53f608af1ac047feb44a7e36a not found: ID does not exist"
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.925048 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "bbd48186-ddc4-4dde-a445-a701bf8e3631" (UID: "bbd48186-ddc4-4dde-a445-a701bf8e3631"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 18:13:38 crc kubenswrapper[5081]: I1003 18:13:38.930207 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16" (OuterVolumeSpecName: "ovn-data") pod "bbd48186-ddc4-4dde-a445-a701bf8e3631" (UID: "bbd48186-ddc4-4dde-a445-a701bf8e3631"). InnerVolumeSpecName "pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.008831 5081 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/bbd48186-ddc4-4dde-a445-a701bf8e3631-ovn-data-cert\") on node \"crc\" DevicePath \"\""
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.008864 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptdx6\" (UniqueName: \"kubernetes.io/projected/bbd48186-ddc4-4dde-a445-a701bf8e3631-kube-api-access-ptdx6\") on node \"crc\" DevicePath \"\""
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.008903 5081 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") on node \"crc\" "
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.058440 5081 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.058632 5081 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16") on node "crc"
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.111021 5081 reconciler_common.go:293] "Volume detached for volume \"pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40eba6f0-3236-49a4-97b7-26dc81ef0b16\") on node \"crc\" DevicePath \"\""
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.226114 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.235476 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"]
Oct 03 18:13:39 crc kubenswrapper[5081]: I1003 18:13:39.840435 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbd48186-ddc4-4dde-a445-a701bf8e3631" path="/var/lib/kubelet/pods/bbd48186-ddc4-4dde-a445-a701bf8e3631/volumes"
Oct 03 18:13:45 crc kubenswrapper[5081]: I1003 18:13:45.829185 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"
Oct 03 18:13:45 crc kubenswrapper[5081]: E1003 18:13:45.830646 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:13:58 crc kubenswrapper[5081]: I1003 18:13:58.830619 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"
Oct 03 18:13:58 crc kubenswrapper[5081]: E1003 18:13:58.831616 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:14:10 crc kubenswrapper[5081]: I1003 18:14:10.827596 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"
Oct 03 18:14:10 crc kubenswrapper[5081]: E1003 18:14:10.828433 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:14:24 crc kubenswrapper[5081]: I1003 18:14:24.827665 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2"
Oct 03 18:14:24 crc kubenswrapper[5081]: E1003 18:14:24.829060 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945045 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tkqzh/must-gather-9r2h4"]
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945859 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd48186-ddc4-4dde-a445-a701bf8e3631" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945873 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd48186-ddc4-4dde-a445-a701bf8e3631" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945887 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="extract-content"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945893 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="extract-content"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945921 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="extract-utilities"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945928 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="extract-utilities"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945945 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ae62aa-1b0a-4995-9562-54b29b558240" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945951 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ae62aa-1b0a-4995-9562-54b29b558240" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945972 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.945979 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.945996 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="extract-content"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946003 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="extract-content"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.946016 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946023 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: E1003 18:14:27.946038 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="extract-utilities"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946045 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="extract-utilities"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946283 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ae62aa-1b0a-4995-9562-54b29b558240" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946308 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbd48186-ddc4-4dde-a445-a701bf8e3631" containerName="adoption"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946332 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb451c9b-8243-493b-9af8-1a5d0ce70470" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.946358 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="065c7e94-5237-4aa8-9df0-60b9e3492800" containerName="registry-server"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.947769 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.952880 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tkqzh"/"openshift-service-ca.crt"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.953117 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tkqzh"/"kube-root-ca.crt"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.953269 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-tkqzh"/"default-dockercfg-h25bm"
Oct 03 18:14:27 crc kubenswrapper[5081]: I1003 18:14:27.960276 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tkqzh/must-gather-9r2h4"]
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.003174 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr9mm\" (UniqueName: \"kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.003413 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.105693 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.105878 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr9mm\" (UniqueName: \"kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.106610 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.131500 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr9mm\" (UniqueName: \"kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm\") pod \"must-gather-9r2h4\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") " pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.265911 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.854650 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tkqzh/must-gather-9r2h4"]
Oct 03 18:14:28 crc kubenswrapper[5081]: I1003 18:14:28.856223 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 18:14:29 crc kubenswrapper[5081]: I1003 18:14:29.429753 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" event={"ID":"db561d03-866c-4f07-9775-0c6f0a5cc7ff","Type":"ContainerStarted","Data":"168d42a3639b8385f20b2b5b5f4a38253123ed1c198ff5f38f315a5f881d0d54"}
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.916769 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"]
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.920349 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mqn2s"
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.929583 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"]
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.996687 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s"
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.996996 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s"
Oct 03 18:14:31 crc kubenswrapper[5081]: I1003 18:14:31.997153 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7gvk\" (UniqueName: \"kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s"
Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.099387 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content\") pod 
\"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.100447 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.100665 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7gvk\" (UniqueName: \"kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.100663 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.100901 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.126163 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7gvk\" (UniqueName: \"kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk\") pod \"redhat-marketplace-mqn2s\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:32 crc kubenswrapper[5081]: I1003 18:14:32.246598 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:34 crc kubenswrapper[5081]: I1003 18:14:34.807935 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"] Oct 03 18:14:34 crc kubenswrapper[5081]: W1003 18:14:34.819049 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92f3c78b_4acd_4691_aade_0e07ea530f63.slice/crio-d30d11b75fc6c6032127123368eb0cf70fc91c040dbd235ba67f45c6e3756e9c WatchSource:0}: Error finding container d30d11b75fc6c6032127123368eb0cf70fc91c040dbd235ba67f45c6e3756e9c: Status 404 returned error can't find the container with id d30d11b75fc6c6032127123368eb0cf70fc91c040dbd235ba67f45c6e3756e9c Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.513982 5081 generic.go:334] "Generic (PLEG): container finished" podID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerID="2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493" exitCode=0 Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.514085 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerDied","Data":"2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493"} Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.514402 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerStarted","Data":"d30d11b75fc6c6032127123368eb0cf70fc91c040dbd235ba67f45c6e3756e9c"} Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.523350 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" event={"ID":"db561d03-866c-4f07-9775-0c6f0a5cc7ff","Type":"ContainerStarted","Data":"4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0"} Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.523390 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" event={"ID":"db561d03-866c-4f07-9775-0c6f0a5cc7ff","Type":"ContainerStarted","Data":"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"} Oct 03 18:14:35 crc kubenswrapper[5081]: I1003 18:14:35.559155 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" podStartSLOduration=2.990879597 podStartE2EDuration="8.559132882s" podCreationTimestamp="2025-10-03 18:14:27 +0000 UTC" firstStartedPulling="2025-10-03 18:14:28.85592273 +0000 UTC m=+9987.821479343" lastFinishedPulling="2025-10-03 18:14:34.424176015 +0000 UTC m=+9993.389732628" observedRunningTime="2025-10-03 18:14:35.548751046 +0000 UTC m=+9994.514307679" watchObservedRunningTime="2025-10-03 18:14:35.559132882 +0000 UTC m=+9994.524689495" Oct 03 18:14:36 crc kubenswrapper[5081]: I1003 18:14:36.827622 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:14:36 crc kubenswrapper[5081]: E1003 18:14:36.828165 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:14:37 crc kubenswrapper[5081]: I1003 18:14:37.545587 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerStarted","Data":"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82"} Oct 03 18:14:37 crc kubenswrapper[5081]: E1003 18:14:37.775370 5081 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92f3c78b_4acd_4691_aade_0e07ea530f63.slice/crio-3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82.scope\": RecentStats: unable to find data in memory cache]" Oct 03 18:14:38 crc kubenswrapper[5081]: I1003 18:14:38.558352 5081 generic.go:334] "Generic (PLEG): container finished" podID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerID="3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82" exitCode=0 Oct 03 18:14:38 crc kubenswrapper[5081]: I1003 18:14:38.558538 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerDied","Data":"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82"} Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.286366 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-qmvpv"] Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.288215 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.374371 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd2bj\" (UniqueName: \"kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.374625 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.476949 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.477074 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd2bj\" (UniqueName: \"kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.477081 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.500300 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd2bj\" (UniqueName: \"kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj\") pod \"crc-debug-qmvpv\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: I1003 18:14:39.613234 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:14:39 crc kubenswrapper[5081]: W1003 18:14:39.665519 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcac95354_2752_4206_bb64_102cfd811bb2.slice/crio-26cefadfb44cf5f6d408ccb8f196e3a4f936c505263ec286eb6c2faac0b87385 WatchSource:0}: Error finding container 26cefadfb44cf5f6d408ccb8f196e3a4f936c505263ec286eb6c2faac0b87385: Status 404 returned error can't find the container with id 26cefadfb44cf5f6d408ccb8f196e3a4f936c505263ec286eb6c2faac0b87385 Oct 03 18:14:40 crc kubenswrapper[5081]: I1003 18:14:40.579240 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerStarted","Data":"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f"} Oct 03 18:14:40 crc kubenswrapper[5081]: I1003 18:14:40.584898 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" event={"ID":"cac95354-2752-4206-bb64-102cfd811bb2","Type":"ContainerStarted","Data":"26cefadfb44cf5f6d408ccb8f196e3a4f936c505263ec286eb6c2faac0b87385"} Oct 03 18:14:40 crc kubenswrapper[5081]: I1003 18:14:40.598722 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mqn2s" podStartSLOduration=4.902103343 podStartE2EDuration="9.598706713s" podCreationTimestamp="2025-10-03 18:14:31 +0000 UTC" firstStartedPulling="2025-10-03 18:14:35.5187138 +0000 UTC m=+9994.484270403" lastFinishedPulling="2025-10-03 18:14:40.21531716 +0000 UTC m=+9999.180873773" observedRunningTime="2025-10-03 18:14:40.598456355 +0000 UTC m=+9999.564012968" watchObservedRunningTime="2025-10-03 18:14:40.598706713 +0000 UTC m=+9999.564263326" Oct 03 18:14:42 crc kubenswrapper[5081]: I1003 18:14:42.247224 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:42 crc kubenswrapper[5081]: I1003 18:14:42.247717 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:42 crc kubenswrapper[5081]: I1003 18:14:42.347417 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:49 crc kubenswrapper[5081]: I1003 18:14:49.827349 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:14:49 crc kubenswrapper[5081]: E1003 18:14:49.845329 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:14:51 crc kubenswrapper[5081]: I1003 18:14:51.726825 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" event={"ID":"cac95354-2752-4206-bb64-102cfd811bb2","Type":"ContainerStarted","Data":"27d74d5e38e57be3faf3b2df6fddebf8aee636244fd58cecba0c34b086e52a81"} Oct 03 18:14:51 crc kubenswrapper[5081]: I1003 18:14:51.741050 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" podStartSLOduration=0.941551462 podStartE2EDuration="12.741031611s" podCreationTimestamp="2025-10-03 18:14:39 +0000 UTC" firstStartedPulling="2025-10-03 18:14:39.66830611 +0000 UTC m=+9998.633862723" lastFinishedPulling="2025-10-03 18:14:51.467786259 +0000 UTC m=+10010.433342872" observedRunningTime="2025-10-03 18:14:51.737208402 +0000 UTC m=+10010.702765025" watchObservedRunningTime="2025-10-03 18:14:51.741031611 +0000 UTC m=+10010.706588224" Oct 03 18:14:52 crc kubenswrapper[5081]: I1003 18:14:52.300674 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:52 crc kubenswrapper[5081]: I1003 18:14:52.367388 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"] Oct 03 18:14:52 crc kubenswrapper[5081]: I1003 18:14:52.748903 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mqn2s" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="registry-server" containerID="cri-o://3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f" gracePeriod=2 Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.287677 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.400079 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7gvk\" (UniqueName: \"kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk\") pod \"92f3c78b-4acd-4691-aade-0e07ea530f63\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.400130 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities\") pod \"92f3c78b-4acd-4691-aade-0e07ea530f63\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.400185 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content\") pod \"92f3c78b-4acd-4691-aade-0e07ea530f63\" (UID: \"92f3c78b-4acd-4691-aade-0e07ea530f63\") " Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.401008 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities" (OuterVolumeSpecName: "utilities") pod "92f3c78b-4acd-4691-aade-0e07ea530f63" (UID: "92f3c78b-4acd-4691-aade-0e07ea530f63"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.408752 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk" (OuterVolumeSpecName: "kube-api-access-f7gvk") pod "92f3c78b-4acd-4691-aade-0e07ea530f63" (UID: "92f3c78b-4acd-4691-aade-0e07ea530f63"). InnerVolumeSpecName "kube-api-access-f7gvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.416893 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92f3c78b-4acd-4691-aade-0e07ea530f63" (UID: "92f3c78b-4acd-4691-aade-0e07ea530f63"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.502885 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7gvk\" (UniqueName: \"kubernetes.io/projected/92f3c78b-4acd-4691-aade-0e07ea530f63-kube-api-access-f7gvk\") on node \"crc\" DevicePath \"\"" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.502936 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.502946 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92f3c78b-4acd-4691-aade-0e07ea530f63-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.760098 5081 generic.go:334] "Generic (PLEG): container finished" podID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerID="3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f" exitCode=0 Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.760139 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerDied","Data":"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f"} Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.760173 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mqn2s" event={"ID":"92f3c78b-4acd-4691-aade-0e07ea530f63","Type":"ContainerDied","Data":"d30d11b75fc6c6032127123368eb0cf70fc91c040dbd235ba67f45c6e3756e9c"} Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.760173 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mqn2s" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.760191 5081 scope.go:117] "RemoveContainer" containerID="3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.793009 5081 scope.go:117] "RemoveContainer" containerID="3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.802880 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"] Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.823392 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mqn2s"] Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.829693 5081 scope.go:117] "RemoveContainer" containerID="2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.844907 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" path="/var/lib/kubelet/pods/92f3c78b-4acd-4691-aade-0e07ea530f63/volumes" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.884713 5081 scope.go:117] "RemoveContainer" containerID="3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f" Oct 03 18:14:53 crc kubenswrapper[5081]: E1003 18:14:53.885278 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f\": container with ID starting with 3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f not found: ID does not exist" containerID="3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.885319 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f"} err="failed to get container status \"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f\": rpc error: code = NotFound desc = could not find container \"3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f\": container with ID starting with 3794541ef07f2cc5a0084300302d0acc6bc7817df18799f7e4a0551e6a24b31f not found: ID does not exist" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.885365 5081 scope.go:117] "RemoveContainer" containerID="3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82" Oct 03 18:14:53 crc kubenswrapper[5081]: E1003 18:14:53.886183 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82\": container with ID starting with 3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82 not found: ID does not exist" containerID="3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.886234 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82"} err="failed to get container status \"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82\": rpc error: code = NotFound desc = could not find container 
\"3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82\": container with ID starting with 3b465d3075e4f917cb00c5fc2e955184c329d2cfc7696442768e219f4ef6ed82 not found: ID does not exist" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.886255 5081 scope.go:117] "RemoveContainer" containerID="2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493" Oct 03 18:14:53 crc kubenswrapper[5081]: E1003 18:14:53.890048 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493\": container with ID starting with 2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493 not found: ID does not exist" containerID="2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493" Oct 03 18:14:53 crc kubenswrapper[5081]: I1003 18:14:53.890085 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493"} err="failed to get container status \"2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493\": rpc error: code = NotFound desc = could not find container \"2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493\": container with ID starting with 2b671c5a971cfc8f296059e9f2fd81cf73cba536fbaebd16628bb7af22ea2493 not found: ID does not exist" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.146949 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w"] Oct 03 18:15:00 crc kubenswrapper[5081]: E1003 18:15:00.147823 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="registry-server" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.147837 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="registry-server" Oct 03 18:15:00 crc kubenswrapper[5081]: E1003 18:15:00.147852 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="extract-utilities" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.147859 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="extract-utilities" Oct 03 18:15:00 crc kubenswrapper[5081]: E1003 18:15:00.147895 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="extract-content" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.147902 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="extract-content" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.148117 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f3c78b-4acd-4691-aade-0e07ea530f63" containerName="registry-server" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.148905 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.151957 5081 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.152608 5081 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.186717 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w"] Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.348351 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.349650 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-255kl\" (UniqueName: \"kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.349719 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.451839 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.451998 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-255kl\" (UniqueName: \"kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.452240 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.453218 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume\") pod 
\"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.459099 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.471692 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-255kl\" (UniqueName: \"kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl\") pod \"collect-profiles-29325255-7cf5w\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:00 crc kubenswrapper[5081]: I1003 18:15:00.772389 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:01 crc kubenswrapper[5081]: I1003 18:15:01.246526 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w"] Oct 03 18:15:01 crc kubenswrapper[5081]: I1003 18:15:01.851233 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" event={"ID":"c7a66362-d12f-49cb-bbf2-672024bbd70d","Type":"ContainerStarted","Data":"e2a61bdc17d845e42736d5f24b55c16aafb754738ed8015231d6125c5867be04"} Oct 03 18:15:01 crc kubenswrapper[5081]: I1003 18:15:01.851535 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" event={"ID":"c7a66362-d12f-49cb-bbf2-672024bbd70d","Type":"ContainerStarted","Data":"98421110e6eb1b2bffacb17a285995659d1a9bc42b5cc3f3267df20e9fd76654"} Oct 03 18:15:01 crc kubenswrapper[5081]: I1003 18:15:01.885645 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" podStartSLOduration=1.885627526 podStartE2EDuration="1.885627526s" podCreationTimestamp="2025-10-03 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:15:01.873078718 +0000 UTC m=+10020.838635351" watchObservedRunningTime="2025-10-03 18:15:01.885627526 +0000 UTC m=+10020.851184139" Oct 03 18:15:02 crc kubenswrapper[5081]: I1003 18:15:02.827464 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:15:02 crc kubenswrapper[5081]: E1003 18:15:02.828676 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:15:02 crc kubenswrapper[5081]: I1003 18:15:02.864683 5081 generic.go:334] "Generic (PLEG): container finished" podID="c7a66362-d12f-49cb-bbf2-672024bbd70d" 
containerID="e2a61bdc17d845e42736d5f24b55c16aafb754738ed8015231d6125c5867be04" exitCode=0 Oct 03 18:15:02 crc kubenswrapper[5081]: I1003 18:15:02.864725 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" event={"ID":"c7a66362-d12f-49cb-bbf2-672024bbd70d","Type":"ContainerDied","Data":"e2a61bdc17d845e42736d5f24b55c16aafb754738ed8015231d6125c5867be04"} Oct 03 18:15:04 crc kubenswrapper[5081]: I1003 18:15:04.895744 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" event={"ID":"c7a66362-d12f-49cb-bbf2-672024bbd70d","Type":"ContainerDied","Data":"98421110e6eb1b2bffacb17a285995659d1a9bc42b5cc3f3267df20e9fd76654"} Oct 03 18:15:04 crc kubenswrapper[5081]: I1003 18:15:04.896550 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98421110e6eb1b2bffacb17a285995659d1a9bc42b5cc3f3267df20e9fd76654" Oct 03 18:15:04 crc kubenswrapper[5081]: I1003 18:15:04.949833 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.071427 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume\") pod \"c7a66362-d12f-49cb-bbf2-672024bbd70d\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.071693 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-255kl\" (UniqueName: \"kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl\") pod \"c7a66362-d12f-49cb-bbf2-672024bbd70d\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.071748 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume\") pod \"c7a66362-d12f-49cb-bbf2-672024bbd70d\" (UID: \"c7a66362-d12f-49cb-bbf2-672024bbd70d\") " Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.074553 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume" (OuterVolumeSpecName: "config-volume") pod "c7a66362-d12f-49cb-bbf2-672024bbd70d" (UID: "c7a66362-d12f-49cb-bbf2-672024bbd70d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.121293 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl" (OuterVolumeSpecName: "kube-api-access-255kl") pod "c7a66362-d12f-49cb-bbf2-672024bbd70d" (UID: "c7a66362-d12f-49cb-bbf2-672024bbd70d"). InnerVolumeSpecName "kube-api-access-255kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.123088 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c7a66362-d12f-49cb-bbf2-672024bbd70d" (UID: "c7a66362-d12f-49cb-bbf2-672024bbd70d"). 
InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.175847 5081 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c7a66362-d12f-49cb-bbf2-672024bbd70d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.175877 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-255kl\" (UniqueName: \"kubernetes.io/projected/c7a66362-d12f-49cb-bbf2-672024bbd70d-kube-api-access-255kl\") on node \"crc\" DevicePath \"\"" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.175888 5081 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c7a66362-d12f-49cb-bbf2-672024bbd70d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 18:15:05 crc kubenswrapper[5081]: I1003 18:15:05.938952 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325255-7cf5w" Oct 03 18:15:06 crc kubenswrapper[5081]: I1003 18:15:06.037566 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r"] Oct 03 18:15:06 crc kubenswrapper[5081]: I1003 18:15:06.046143 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325210-xhc7r"] Oct 03 18:15:07 crc kubenswrapper[5081]: I1003 18:15:07.841725 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f73c6db5-52fb-49c4-9cdb-2405e5f48572" path="/var/lib/kubelet/pods/f73c6db5-52fb-49c4-9cdb-2405e5f48572/volumes" Oct 03 18:15:16 crc kubenswrapper[5081]: I1003 18:15:16.828229 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:15:16 crc kubenswrapper[5081]: E1003 18:15:16.829334 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:15:29 crc kubenswrapper[5081]: I1003 18:15:29.820936 5081 scope.go:117] "RemoveContainer" containerID="7be7424434f93a479457a1572354b79b32e08e626dd7e6e015affd448fedc096" Oct 03 18:15:31 crc kubenswrapper[5081]: I1003 18:15:31.837964 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:15:31 crc kubenswrapper[5081]: E1003 18:15:31.838839 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:15:37 crc kubenswrapper[5081]: I1003 18:15:37.779228 5081 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podc7a66362-d12f-49cb-bbf2-672024bbd70d"] err="unable to destroy cgroup paths for cgroup [kubepods 
burstable podc7a66362-d12f-49cb-bbf2-672024bbd70d] : Timed out while waiting for systemd to remove kubepods-burstable-podc7a66362_d12f_49cb_bbf2_672024bbd70d.slice" Oct 03 18:15:42 crc kubenswrapper[5081]: I1003 18:15:42.827715 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:15:42 crc kubenswrapper[5081]: E1003 18:15:42.828886 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:15:53 crc kubenswrapper[5081]: I1003 18:15:53.831211 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:15:53 crc kubenswrapper[5081]: E1003 18:15:53.831994 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:16:05 crc kubenswrapper[5081]: I1003 18:16:05.830376 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:16:05 crc kubenswrapper[5081]: E1003 18:16:05.831303 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:16:14 crc kubenswrapper[5081]: I1003 18:16:14.021611 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_0a09ecee-8e55-49a6-9849-b36d11700f3e/init-config-reloader/0.log" Oct 03 18:16:14 crc kubenswrapper[5081]: I1003 18:16:14.446711 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_0a09ecee-8e55-49a6-9849-b36d11700f3e/init-config-reloader/0.log" Oct 03 18:16:14 crc kubenswrapper[5081]: I1003 18:16:14.501629 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_0a09ecee-8e55-49a6-9849-b36d11700f3e/alertmanager/0.log" Oct 03 18:16:14 crc kubenswrapper[5081]: I1003 18:16:14.701393 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_0a09ecee-8e55-49a6-9849-b36d11700f3e/config-reloader/0.log" Oct 03 18:16:14 crc kubenswrapper[5081]: I1003 18:16:14.902079 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_225c5210-2413-4098-8d82-9907db3aad43/aodh-api/0.log" Oct 03 18:16:15 crc kubenswrapper[5081]: I1003 18:16:14.997991 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_225c5210-2413-4098-8d82-9907db3aad43/aodh-evaluator/0.log" Oct 03 18:16:15 crc kubenswrapper[5081]: I1003 
18:16:15.189502 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_225c5210-2413-4098-8d82-9907db3aad43/aodh-listener/0.log" Oct 03 18:16:15 crc kubenswrapper[5081]: I1003 18:16:15.825043 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_225c5210-2413-4098-8d82-9907db3aad43/aodh-notifier/0.log" Oct 03 18:16:16 crc kubenswrapper[5081]: I1003 18:16:16.224481 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-577f988d76-v8j4v_ff3fb265-cd51-4e41-823f-c0aa9bea922b/barbican-api/0.log" Oct 03 18:16:16 crc kubenswrapper[5081]: I1003 18:16:16.310972 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-577f988d76-v8j4v_ff3fb265-cd51-4e41-823f-c0aa9bea922b/barbican-api-log/0.log" Oct 03 18:16:16 crc kubenswrapper[5081]: I1003 18:16:16.500901 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-78657f88c4-88v4m_da20472c-048f-4648-9bb5-fdee538607f4/barbican-keystone-listener/0.log" Oct 03 18:16:16 crc kubenswrapper[5081]: I1003 18:16:16.761395 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-df9f9c479-tccq5_03abedc4-2d10-4f99-bbe3-df6ddc2e853b/barbican-worker/0.log" Oct 03 18:16:16 crc kubenswrapper[5081]: I1003 18:16:16.762065 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-78657f88c4-88v4m_da20472c-048f-4648-9bb5-fdee538607f4/barbican-keystone-listener-log/0.log" Oct 03 18:16:17 crc kubenswrapper[5081]: I1003 18:16:17.639655 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-df9f9c479-tccq5_03abedc4-2d10-4f99-bbe3-df6ddc2e853b/barbican-worker-log/0.log" Oct 03 18:16:17 crc kubenswrapper[5081]: I1003 18:16:17.668428 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-xjbtb_fa4df372-397d-46cd-81e5-a8dae67295ad/bootstrap-openstack-openstack-cell1/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.024254 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1633625d-b110-403f-81cf-378b74105c5d/ceilometer-central-agent/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.058158 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1633625d-b110-403f-81cf-378b74105c5d/proxy-httpd/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.117083 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1633625d-b110-403f-81cf-378b74105c5d/ceilometer-notification-agent/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.239832 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1633625d-b110-403f-81cf-378b74105c5d/sg-core/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.324014 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-tvwmz_a37cad0b-caa9-4c10-a77a-a805bdf7ea1f/ceph-client-openstack-openstack-cell1/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.511664 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_69b4808f-07a4-457b-97b9-675631790938/cinder-api-log/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.584250 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-api-0_69b4808f-07a4-457b-97b9-675631790938/cinder-api/0.log" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.833876 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:16:18 crc kubenswrapper[5081]: E1003 18:16:18.834284 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:16:18 crc kubenswrapper[5081]: I1003 18:16:18.959942 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_00a5c66b-f312-4379-9769-64c858f3816a/cinder-backup/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.030767 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_00a5c66b-f312-4379-9769-64c858f3816a/probe/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.284577 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_dda4cdc8-07f1-4153-8531-0a827ccf3029/cinder-scheduler/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.334596 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_dda4cdc8-07f1-4153-8531-0a827ccf3029/probe/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.497606 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_aa84b169-b917-4fcf-86a8-cfcde993fd80/cinder-volume/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.560184 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_aa84b169-b917-4fcf-86a8-cfcde993fd80/probe/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.734098 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-tdk8r_7552988a-62cb-429c-b959-44546d45ba71/configure-network-openstack-openstack-cell1/0.log" Oct 03 18:16:19 crc kubenswrapper[5081]: I1003 18:16:19.937749 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-g54tj_389263a2-812c-4ded-bc03-549916284b76/configure-os-openstack-openstack-cell1/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.015065 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-66d4fbb967-2bmls_36922327-95b1-465f-9628-2a9056e8f6b4/init/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.244170 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-66d4fbb967-2bmls_36922327-95b1-465f-9628-2a9056e8f6b4/init/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.253971 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-66d4fbb967-2bmls_36922327-95b1-465f-9628-2a9056e8f6b4/dnsmasq-dns/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.455370 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-tckpk_b07c8283-4077-478a-a8ff-18433231fb38/download-cache-openstack-openstack-cell1/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 
18:16:20.541424 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3a080e0a-e9a0-41a5-b0e1-600b6e4d854a/glance-httpd/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.677199 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3a080e0a-e9a0-41a5-b0e1-600b6e4d854a/glance-log/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.761020 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e7eb7b30-2efc-4e38-b64e-61ea399c7303/glance-httpd/0.log" Oct 03 18:16:20 crc kubenswrapper[5081]: I1003 18:16:20.951005 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e7eb7b30-2efc-4e38-b64e-61ea399c7303/glance-log/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.069246 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-59bdc8c586-94n9t_ae3762c4-4c4b-4aa3-894c-b690f2278873/heat-api/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.403951 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6cdc8c77c-jb7zk_6e40af3b-73b5-4f03-87f7-cad577ecb4da/heat-cfnapi/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.473717 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-764cf6b767-tpqfz_cccdc4a3-640a-4f97-800c-d5e8dce5c50e/heat-engine/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.664648 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bdb759599-v7cc7_0d981dd8-ad4a-4483-a79b-7189f460f7b5/horizon/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.814818 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-bdb759599-v7cc7_0d981dd8-ad4a-4483-a79b-7189f460f7b5/horizon-log/0.log" Oct 03 18:16:21 crc kubenswrapper[5081]: I1003 18:16:21.943710 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-g6jcr_f146ceaa-8661-4678-9ac0-8b6758943586/install-certs-openstack-openstack-cell1/0.log" Oct 03 18:16:22 crc kubenswrapper[5081]: I1003 18:16:22.167994 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-5sxn6_16e0fc84-51ae-4419-8ba0-0268aa8b5f6e/install-os-openstack-openstack-cell1/0.log" Oct 03 18:16:22 crc kubenswrapper[5081]: I1003 18:16:22.415534 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325181-bctjv_d4bf20b9-db74-4f02-bc9a-22f16465e199/keystone-cron/0.log" Oct 03 18:16:22 crc kubenswrapper[5081]: I1003 18:16:22.428502 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6f87fd488-szdht_84d9e617-4210-4e00-9fe0-f6c065ce1282/keystone-api/0.log" Oct 03 18:16:22 crc kubenswrapper[5081]: I1003 18:16:22.603134 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325241-j44l4_b1f28221-9081-4daf-aea6-42b308f80bae/keystone-cron/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.080575 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e19c8b15-8fab-44b0-82d1-5929e7568034/kube-state-metrics/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.126459 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-5d9kd_7014f4b4-d256-4a19-9e35-1e3afc33fd76/libvirt-openstack-openstack-cell1/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.377072 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_0ea72c99-363e-402d-a70d-ab74578c11b3/manila-api/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.445577 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_0ea72c99-363e-402d-a70d-ab74578c11b3/manila-api-log/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.643551 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_8bf9610d-b7df-4307-91cc-71c7dcc42da9/manila-scheduler/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.648993 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_8bf9610d-b7df-4307-91cc-71c7dcc42da9/probe/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.863610 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e89d46ed-7517-4484-b4f7-9e067e6cf6d1/manila-share/0.log" Oct 03 18:16:23 crc kubenswrapper[5081]: I1003 18:16:23.886783 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_e89d46ed-7517-4484-b4f7-9e067e6cf6d1/probe/0.log" Oct 03 18:16:24 crc kubenswrapper[5081]: I1003 18:16:24.498554 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66bff68f7c-cm8v6_3cbb4cae-c663-4fbe-91b1-3bdc89f48b83/neutron-httpd/0.log" Oct 03 18:16:24 crc kubenswrapper[5081]: I1003 18:16:24.580086 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66bff68f7c-cm8v6_3cbb4cae-c663-4fbe-91b1-3bdc89f48b83/neutron-api/0.log" Oct 03 18:16:24 crc kubenswrapper[5081]: I1003 18:16:24.929455 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-6wdgs_acec17fb-26c9-474e-a337-31044887b6fe/neutron-dhcp-openstack-openstack-cell1/0.log" Oct 03 18:16:25 crc kubenswrapper[5081]: I1003 18:16:25.305303 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-fz5cs_e1230e1d-eabc-4a43-b030-51403a3c15e8/neutron-metadata-openstack-openstack-cell1/0.log" Oct 03 18:16:25 crc kubenswrapper[5081]: I1003 18:16:25.707737 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-dnmvd_8c4c91ae-3176-4f71-862a-e818d8d7f212/neutron-sriov-openstack-openstack-cell1/0.log" Oct 03 18:16:26 crc kubenswrapper[5081]: I1003 18:16:26.071276 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_995b2446-f90a-4493-8a6f-668d1b2bd321/nova-api-api/0.log" Oct 03 18:16:26 crc kubenswrapper[5081]: I1003 18:16:26.303119 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_995b2446-f90a-4493-8a6f-668d1b2bd321/nova-api-log/0.log" Oct 03 18:16:26 crc kubenswrapper[5081]: I1003 18:16:26.595359 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_311e8141-0ab3-4921-b678-528ba5e545f0/nova-cell0-conductor-conductor/0.log" Oct 03 18:16:27 crc kubenswrapper[5081]: I1003 18:16:27.019715 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_a25ab879-1c41-4b6e-920c-51903f580487/nova-cell1-conductor-conductor/0.log" Oct 03 18:16:27 crc 
kubenswrapper[5081]: I1003 18:16:27.483065 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_85e6ff59-4218-4af3-8a06-ab7babff11f1/memcached/0.log" Oct 03 18:16:27 crc kubenswrapper[5081]: I1003 18:16:27.587081 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ae810f05-5faa-4f0b-922e-9c2128d25d5b/nova-cell1-novncproxy-novncproxy/0.log" Oct 03 18:16:27 crc kubenswrapper[5081]: I1003 18:16:27.955279 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celld2tjc_8cf6daab-c22a-4cd4-8d88-64a2bf39e05c/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.105864 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-rwk8c_f85372bd-deb7-4f5d-b631-1b26524ca9ff/nova-cell1-openstack-openstack-cell1/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.261765 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c095f0e1-925d-4ed3-afc0-7392d36ce821/nova-metadata-log/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.358032 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c095f0e1-925d-4ed3-afc0-7392d36ce821/nova-metadata-metadata/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.543595 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5b68a946-83c2-4eb6-8472-11a5fc334f38/nova-scheduler-scheduler/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.661062 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7d488df5bb-2hhk4_819412d5-054a-44e9-993a-0b4a33fe300b/init/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.877623 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7d488df5bb-2hhk4_819412d5-054a-44e9-993a-0b4a33fe300b/init/0.log" Oct 03 18:16:28 crc kubenswrapper[5081]: I1003 18:16:28.932423 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7d488df5bb-2hhk4_819412d5-054a-44e9-993a-0b4a33fe300b/octavia-api-provider-agent/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.135844 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7d488df5bb-2hhk4_819412d5-054a-44e9-993a-0b4a33fe300b/octavia-api/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.213528 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-sjtg8_6d2de904-d28d-44cf-95ba-1be8e12f2699/init/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.395955 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-sjtg8_6d2de904-d28d-44cf-95ba-1be8e12f2699/init/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.508357 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-sjtg8_6d2de904-d28d-44cf-95ba-1be8e12f2699/octavia-healthmanager/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.637916 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-5r6rb_9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f/init/0.log" Oct 03 18:16:29 crc kubenswrapper[5081]: I1003 18:16:29.774244 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-housekeeping-5r6rb_9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f/init/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.313896 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-5r6rb_9e7f5f34-1dae-4c4b-88b7-ec46d1336f8f/octavia-housekeeping/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.395479 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-678599687f-c5qjv_6ee77da4-e567-42df-b018-d5c1a2bb0c59/init/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.586027 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-678599687f-c5qjv_6ee77da4-e567-42df-b018-d5c1a2bb0c59/octavia-amphora-httpd/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.600522 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-678599687f-c5qjv_6ee77da4-e567-42df-b018-d5c1a2bb0c59/init/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.785148 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-c9kjr_6b6c0048-f1a7-4325-90c6-885a102c1696/init/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.909745 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-c9kjr_6b6c0048-f1a7-4325-90c6-885a102c1696/init/0.log" Oct 03 18:16:30 crc kubenswrapper[5081]: I1003 18:16:30.931052 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-c9kjr_6b6c0048-f1a7-4325-90c6-885a102c1696/octavia-rsyslog/0.log" Oct 03 18:16:31 crc kubenswrapper[5081]: I1003 18:16:31.154128 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-xxbmw_faef3e0c-bc19-4475-9c3b-fb8aea539120/init/0.log" Oct 03 18:16:31 crc kubenswrapper[5081]: I1003 18:16:31.336082 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-xxbmw_faef3e0c-bc19-4475-9c3b-fb8aea539120/init/0.log" Oct 03 18:16:31 crc kubenswrapper[5081]: I1003 18:16:31.528511 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-xxbmw_faef3e0c-bc19-4475-9c3b-fb8aea539120/octavia-worker/0.log" Oct 03 18:16:31 crc kubenswrapper[5081]: I1003 18:16:31.600538 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc/mysql-bootstrap/0.log" Oct 03 18:16:31 crc kubenswrapper[5081]: I1003 18:16:31.836819 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:16:31 crc kubenswrapper[5081]: E1003 18:16:31.837813 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.276730 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc/mysql-bootstrap/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.319324 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_afc0e0e6-9d42-4e1e-9dd5-79d2634f26bc/galera/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.443600 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2d92c51-1c3d-401a-b405-973b0ec094b7/mysql-bootstrap/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.642943 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2d92c51-1c3d-401a-b405-973b0ec094b7/mysql-bootstrap/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.689440 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f2d92c51-1c3d-401a-b405-973b0ec094b7/galera/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.861284 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0abfafdf-c49b-4af3-99d0-772c7fb96392/openstackclient/0.log" Oct 03 18:16:32 crc kubenswrapper[5081]: I1003 18:16:32.930513 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-fjdv6_ed1f167d-3b34-4746-bf62-f4bea485b117/ovn-controller/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.122890 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-5xtx9_5e9fd2cd-5158-4c3d-ac50-60e94ba543b4/openstack-network-exporter/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.260241 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ccgkm_923d9dbc-ce8e-48b1-8425-4d9075edfa5c/ovsdb-server-init/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.442872 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ccgkm_923d9dbc-ce8e-48b1-8425-4d9075edfa5c/ovsdb-server-init/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.493773 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ccgkm_923d9dbc-ce8e-48b1-8425-4d9075edfa5c/ovsdb-server/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.689250 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_38891ae7-ca75-4e26-817f-b3594f81aa8b/openstack-network-exporter/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.694753 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ccgkm_923d9dbc-ce8e-48b1-8425-4d9075edfa5c/ovs-vswitchd/0.log" Oct 03 18:16:33 crc kubenswrapper[5081]: I1003 18:16:33.807303 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_38891ae7-ca75-4e26-817f-b3594f81aa8b/ovn-northd/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.105160 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c9ad8da2-f647-4cc9-ba8c-081118e9c54d/openstack-network-exporter/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.220919 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-xrjgj_cac5b3b6-e745-4550-82d3-49b8366b411d/ovn-openstack-openstack-cell1/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.319429 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c9ad8da2-f647-4cc9-ba8c-081118e9c54d/ovsdbserver-nb/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.391968 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-1_a9bc237e-7713-46e2-b1ee-aa2bdd9e724f/openstack-network-exporter/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.487780 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_a9bc237e-7713-46e2-b1ee-aa2bdd9e724f/ovsdbserver-nb/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.569993 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_9ff15d64-ab68-41c5-9cac-bd3f5bc8b019/openstack-network-exporter/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.687373 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_9ff15d64-ab68-41c5-9cac-bd3f5bc8b019/ovsdbserver-nb/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.772813 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7e83a75a-8fba-4c53-8e12-098062e659a8/openstack-network-exporter/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.887921 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7e83a75a-8fba-4c53-8e12-098062e659a8/ovsdbserver-sb/0.log" Oct 03 18:16:34 crc kubenswrapper[5081]: I1003 18:16:34.968425 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_3f003a0c-17ad-436e-b8a6-9de8188717d1/openstack-network-exporter/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.071684 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_3f003a0c-17ad-436e-b8a6-9de8188717d1/ovsdbserver-sb/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.183450 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_9afa3095-5432-4475-bd7a-cd4f76cac607/openstack-network-exporter/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.229873 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_9afa3095-5432-4475-bd7a-cd4f76cac607/ovsdbserver-sb/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.402130 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b65dbcc9d-xtvgl_ad02f846-e187-4bdd-b2a0-0ba53fc9b6af/placement-api/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.483459 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b65dbcc9d-xtvgl_ad02f846-e187-4bdd-b2a0-0ba53fc9b6af/placement-log/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.625420 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-c4q25d_5de866ec-f278-4793-953c-7fdd64f008d7/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.847120 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_675a2924-f39b-4c35-9411-308db76e69aa/init-config-reloader/0.log" Oct 03 18:16:35 crc kubenswrapper[5081]: I1003 18:16:35.989485 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_675a2924-f39b-4c35-9411-308db76e69aa/init-config-reloader/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.015924 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_675a2924-f39b-4c35-9411-308db76e69aa/config-reloader/0.log" Oct 03 18:16:36 crc 
kubenswrapper[5081]: I1003 18:16:36.050526 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_675a2924-f39b-4c35-9411-308db76e69aa/prometheus/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.165506 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_675a2924-f39b-4c35-9411-308db76e69aa/thanos-sidecar/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.228695 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e885ab7c-f947-4729-8711-a2142a7d2667/setup-container/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.434052 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e885ab7c-f947-4729-8711-a2142a7d2667/setup-container/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.501329 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e885ab7c-f947-4729-8711-a2142a7d2667/rabbitmq/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.630751 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b9bd920-4f84-49f3-b731-eceb9244abd4/setup-container/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.764981 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b9bd920-4f84-49f3-b731-eceb9244abd4/setup-container/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.879766 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9b9bd920-4f84-49f3-b731-eceb9244abd4/rabbitmq/0.log" Oct 03 18:16:36 crc kubenswrapper[5081]: I1003 18:16:36.954123 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-r6dwf_530974f1-240c-4409-895f-c0d97190e235/reboot-os-openstack-openstack-cell1/0.log" Oct 03 18:16:37 crc kubenswrapper[5081]: I1003 18:16:37.064906 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-d4q95_458f0d57-1001-444a-ba77-15cb42cc0ceb/run-os-openstack-openstack-cell1/0.log" Oct 03 18:16:37 crc kubenswrapper[5081]: I1003 18:16:37.227430 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-9gkk8_c1204f6d-ed27-4b6d-bd88-838314a990be/ssh-known-hosts-openstack/0.log" Oct 03 18:16:37 crc kubenswrapper[5081]: I1003 18:16:37.445693 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-kvb6x_0f4a9c1c-622d-4712-9545-2ba16f2dd133/telemetry-openstack-openstack-cell1/0.log" Oct 03 18:16:37 crc kubenswrapper[5081]: I1003 18:16:37.814851 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-mlqq9_bd0f854a-61ae-40aa-b100-ae4918c3dcea/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Oct 03 18:16:37 crc kubenswrapper[5081]: I1003 18:16:37.868177 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-srdwj_890f24f0-7e77-4fb6-a241-51f148fba79c/validate-network-openstack-openstack-cell1/0.log" Oct 03 18:16:46 crc kubenswrapper[5081]: I1003 18:16:46.827811 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:16:46 crc kubenswrapper[5081]: E1003 18:16:46.828699 5081 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:17:00 crc kubenswrapper[5081]: I1003 18:17:00.828180 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:17:00 crc kubenswrapper[5081]: E1003 18:17:00.829088 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:17:13 crc kubenswrapper[5081]: I1003 18:17:13.827857 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:17:13 crc kubenswrapper[5081]: E1003 18:17:13.828664 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:17:18 crc kubenswrapper[5081]: I1003 18:17:18.398128 5081 generic.go:334] "Generic (PLEG): container finished" podID="cac95354-2752-4206-bb64-102cfd811bb2" containerID="27d74d5e38e57be3faf3b2df6fddebf8aee636244fd58cecba0c34b086e52a81" exitCode=0 Oct 03 18:17:18 crc kubenswrapper[5081]: I1003 18:17:18.398538 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" event={"ID":"cac95354-2752-4206-bb64-102cfd811bb2","Type":"ContainerDied","Data":"27d74d5e38e57be3faf3b2df6fddebf8aee636244fd58cecba0c34b086e52a81"} Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.528463 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.562920 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-qmvpv"] Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.572133 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-qmvpv"] Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.663398 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host\") pod \"cac95354-2752-4206-bb64-102cfd811bb2\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.663916 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd2bj\" (UniqueName: \"kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj\") pod \"cac95354-2752-4206-bb64-102cfd811bb2\" (UID: \"cac95354-2752-4206-bb64-102cfd811bb2\") " Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.663529 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host" (OuterVolumeSpecName: "host") pod "cac95354-2752-4206-bb64-102cfd811bb2" (UID: "cac95354-2752-4206-bb64-102cfd811bb2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.664404 5081 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cac95354-2752-4206-bb64-102cfd811bb2-host\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.674046 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj" (OuterVolumeSpecName: "kube-api-access-fd2bj") pod "cac95354-2752-4206-bb64-102cfd811bb2" (UID: "cac95354-2752-4206-bb64-102cfd811bb2"). InnerVolumeSpecName "kube-api-access-fd2bj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.766368 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd2bj\" (UniqueName: \"kubernetes.io/projected/cac95354-2752-4206-bb64-102cfd811bb2-kube-api-access-fd2bj\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:19 crc kubenswrapper[5081]: I1003 18:17:19.838225 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cac95354-2752-4206-bb64-102cfd811bb2" path="/var/lib/kubelet/pods/cac95354-2752-4206-bb64-102cfd811bb2/volumes" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.425888 5081 scope.go:117] "RemoveContainer" containerID="27d74d5e38e57be3faf3b2df6fddebf8aee636244fd58cecba0c34b086e52a81" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.425952 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-qmvpv" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.748241 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-vb76g"] Oct 03 18:17:20 crc kubenswrapper[5081]: E1003 18:17:20.749158 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a66362-d12f-49cb-bbf2-672024bbd70d" containerName="collect-profiles" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.749175 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a66362-d12f-49cb-bbf2-672024bbd70d" containerName="collect-profiles" Oct 03 18:17:20 crc kubenswrapper[5081]: E1003 18:17:20.749199 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cac95354-2752-4206-bb64-102cfd811bb2" containerName="container-00" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.749206 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="cac95354-2752-4206-bb64-102cfd811bb2" containerName="container-00" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.749456 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="cac95354-2752-4206-bb64-102cfd811bb2" containerName="container-00" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.749470 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a66362-d12f-49cb-bbf2-672024bbd70d" containerName="collect-profiles" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.750376 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.793055 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjd5x\" (UniqueName: \"kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.793205 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.896513 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjd5x\" (UniqueName: \"kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.897850 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc kubenswrapper[5081]: I1003 18:17:20.898333 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:20 crc 
kubenswrapper[5081]: I1003 18:17:20.919755 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjd5x\" (UniqueName: \"kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x\") pod \"crc-debug-vb76g\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:21 crc kubenswrapper[5081]: I1003 18:17:21.069548 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:21 crc kubenswrapper[5081]: I1003 18:17:21.438430 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" event={"ID":"48459b09-6924-42fb-a19c-260a88a0ceca","Type":"ContainerStarted","Data":"d0fad12341ce6932b68db256e3cd7c5cc9b0b4afaa9620b00a6db0b9783dd7ed"} Oct 03 18:17:21 crc kubenswrapper[5081]: I1003 18:17:21.439237 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" event={"ID":"48459b09-6924-42fb-a19c-260a88a0ceca","Type":"ContainerStarted","Data":"93003ea25a57a52dfec929945a8943f4be65063641c0e0c74ec1aa9d3c3d4c6a"} Oct 03 18:17:21 crc kubenswrapper[5081]: I1003 18:17:21.456203 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" podStartSLOduration=1.45618568 podStartE2EDuration="1.45618568s" podCreationTimestamp="2025-10-03 18:17:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 18:17:21.453353369 +0000 UTC m=+10160.418909982" watchObservedRunningTime="2025-10-03 18:17:21.45618568 +0000 UTC m=+10160.421742293" Oct 03 18:17:22 crc kubenswrapper[5081]: I1003 18:17:22.449634 5081 generic.go:334] "Generic (PLEG): container finished" podID="48459b09-6924-42fb-a19c-260a88a0ceca" containerID="d0fad12341ce6932b68db256e3cd7c5cc9b0b4afaa9620b00a6db0b9783dd7ed" exitCode=0 Oct 03 18:17:22 crc kubenswrapper[5081]: I1003 18:17:22.449753 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" event={"ID":"48459b09-6924-42fb-a19c-260a88a0ceca","Type":"ContainerDied","Data":"d0fad12341ce6932b68db256e3cd7c5cc9b0b4afaa9620b00a6db0b9783dd7ed"} Oct 03 18:17:23 crc kubenswrapper[5081]: I1003 18:17:23.912260 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.058079 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host\") pod \"48459b09-6924-42fb-a19c-260a88a0ceca\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.058197 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjd5x\" (UniqueName: \"kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x\") pod \"48459b09-6924-42fb-a19c-260a88a0ceca\" (UID: \"48459b09-6924-42fb-a19c-260a88a0ceca\") " Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.058385 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host" (OuterVolumeSpecName: "host") pod "48459b09-6924-42fb-a19c-260a88a0ceca" (UID: "48459b09-6924-42fb-a19c-260a88a0ceca"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.059125 5081 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48459b09-6924-42fb-a19c-260a88a0ceca-host\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.063159 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x" (OuterVolumeSpecName: "kube-api-access-pjd5x") pod "48459b09-6924-42fb-a19c-260a88a0ceca" (UID: "48459b09-6924-42fb-a19c-260a88a0ceca"). InnerVolumeSpecName "kube-api-access-pjd5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.160927 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjd5x\" (UniqueName: \"kubernetes.io/projected/48459b09-6924-42fb-a19c-260a88a0ceca-kube-api-access-pjd5x\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.468869 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" event={"ID":"48459b09-6924-42fb-a19c-260a88a0ceca","Type":"ContainerDied","Data":"93003ea25a57a52dfec929945a8943f4be65063641c0e0c74ec1aa9d3c3d4c6a"} Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.468908 5081 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93003ea25a57a52dfec929945a8943f4be65063641c0e0c74ec1aa9d3c3d4c6a" Oct 03 18:17:24 crc kubenswrapper[5081]: I1003 18:17:24.468957 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-vb76g" Oct 03 18:17:26 crc kubenswrapper[5081]: I1003 18:17:26.827261 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:17:26 crc kubenswrapper[5081]: E1003 18:17:26.828002 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:17:32 crc kubenswrapper[5081]: I1003 18:17:32.265883 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-vb76g"] Oct 03 18:17:32 crc kubenswrapper[5081]: I1003 18:17:32.277681 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-vb76g"] Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.459011 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-8z57t"] Oct 03 18:17:33 crc kubenswrapper[5081]: E1003 18:17:33.461705 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48459b09-6924-42fb-a19c-260a88a0ceca" containerName="container-00" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.461820 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="48459b09-6924-42fb-a19c-260a88a0ceca" containerName="container-00" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.462232 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="48459b09-6924-42fb-a19c-260a88a0ceca" containerName="container-00" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.463237 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.581179 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.581272 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr6tg\" (UniqueName: \"kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.683693 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.683784 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr6tg\" (UniqueName: \"kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.683829 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.705873 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr6tg\" (UniqueName: \"kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg\") pod \"crc-debug-8z57t\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.786232 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:33 crc kubenswrapper[5081]: I1003 18:17:33.840993 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48459b09-6924-42fb-a19c-260a88a0ceca" path="/var/lib/kubelet/pods/48459b09-6924-42fb-a19c-260a88a0ceca/volumes" Oct 03 18:17:34 crc kubenswrapper[5081]: I1003 18:17:34.570927 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" event={"ID":"446f3314-e6d8-4d1e-8e7a-3b2bc832d031","Type":"ContainerStarted","Data":"bf9a166566772bbc4d16759964fc16e95faccf265206c5392e34d4e4ee0effb5"} Oct 03 18:17:35 crc kubenswrapper[5081]: I1003 18:17:35.602079 5081 generic.go:334] "Generic (PLEG): container finished" podID="446f3314-e6d8-4d1e-8e7a-3b2bc832d031" containerID="e73e2872265d5da1cace4048629afcf88e6c790c34dd62a3dad630ee5cd77564" exitCode=0 Oct 03 18:17:35 crc kubenswrapper[5081]: I1003 18:17:35.602175 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" event={"ID":"446f3314-e6d8-4d1e-8e7a-3b2bc832d031","Type":"ContainerDied","Data":"e73e2872265d5da1cace4048629afcf88e6c790c34dd62a3dad630ee5cd77564"} Oct 03 18:17:35 crc kubenswrapper[5081]: I1003 18:17:35.637139 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-8z57t"] Oct 03 18:17:35 crc kubenswrapper[5081]: I1003 18:17:35.650251 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tkqzh/crc-debug-8z57t"] Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.717291 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.850176 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr6tg\" (UniqueName: \"kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg\") pod \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.850620 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host\") pod \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\" (UID: \"446f3314-e6d8-4d1e-8e7a-3b2bc832d031\") " Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.850732 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host" (OuterVolumeSpecName: "host") pod "446f3314-e6d8-4d1e-8e7a-3b2bc832d031" (UID: "446f3314-e6d8-4d1e-8e7a-3b2bc832d031"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.851537 5081 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-host\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.854734 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg" (OuterVolumeSpecName: "kube-api-access-kr6tg") pod "446f3314-e6d8-4d1e-8e7a-3b2bc832d031" (UID: "446f3314-e6d8-4d1e-8e7a-3b2bc832d031"). InnerVolumeSpecName "kube-api-access-kr6tg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:17:36 crc kubenswrapper[5081]: I1003 18:17:36.955157 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr6tg\" (UniqueName: \"kubernetes.io/projected/446f3314-e6d8-4d1e-8e7a-3b2bc832d031-kube-api-access-kr6tg\") on node \"crc\" DevicePath \"\"" Oct 03 18:17:37 crc kubenswrapper[5081]: I1003 18:17:37.623783 5081 scope.go:117] "RemoveContainer" containerID="e73e2872265d5da1cace4048629afcf88e6c790c34dd62a3dad630ee5cd77564" Oct 03 18:17:37 crc kubenswrapper[5081]: I1003 18:17:37.623959 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/crc-debug-8z57t" Oct 03 18:17:37 crc kubenswrapper[5081]: I1003 18:17:37.843193 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="446f3314-e6d8-4d1e-8e7a-3b2bc832d031" path="/var/lib/kubelet/pods/446f3314-e6d8-4d1e-8e7a-3b2bc832d031/volumes" Oct 03 18:17:40 crc kubenswrapper[5081]: I1003 18:17:40.828421 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:17:40 crc kubenswrapper[5081]: E1003 18:17:40.829139 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:17:51 crc kubenswrapper[5081]: I1003 18:17:51.836419 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:17:51 crc kubenswrapper[5081]: E1003 18:17:51.837118 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:18:01 crc kubenswrapper[5081]: I1003 18:18:01.919003 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/util/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.102852 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/pull/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.113761 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/util/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.114361 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/pull/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.285758 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/util/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.306799 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/extract/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.317051 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_00fdfcf229b8ff1fdb3fe92d2c04f3dec332acb95c91f2ef3a9c95bafe24wc2_35d957f7-e16f-4144-b31d-db3861d30081/pull/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.479979 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6d6d64fdcf-xfg45_e762bb01-e884-43df-afe3-2c4bc45136a8/kube-rbac-proxy/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.557871 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8686fd99f7-cpmwx_7d518e4e-beff-4962-83a3-e4147b2cefed/kube-rbac-proxy/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.591769 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6d6d64fdcf-xfg45_e762bb01-e884-43df-afe3-2c4bc45136a8/manager/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.797013 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8686fd99f7-cpmwx_7d518e4e-beff-4962-83a3-e4147b2cefed/manager/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.808199 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-58d86cd59d-8pt7x_fa557e19-6921-4cb5-88b0-10ee3093201c/manager/0.log" Oct 03 18:18:02 crc kubenswrapper[5081]: I1003 18:18:02.812872 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-58d86cd59d-8pt7x_fa557e19-6921-4cb5-88b0-10ee3093201c/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.009148 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-d785ddfd5-mqj79_4bea9891-fd7e-44ed-9af7-868cb55a9a59/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.202825 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5ffbdb7ddf-s5dz2_e305740c-d1a1-4150-ab8f-0742d6a50db3/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.275143 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-d785ddfd5-mqj79_4bea9891-fd7e-44ed-9af7-868cb55a9a59/manager/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.311112 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5ffbdb7ddf-s5dz2_e305740c-d1a1-4150-ab8f-0742d6a50db3/manager/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.425222 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-586b66cf4f-t8jvq_717d0fb4-cd0f-42b2-aca0-47e6166fe5d0/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.463220 
5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-586b66cf4f-t8jvq_717d0fb4-cd0f-42b2-aca0-47e6166fe5d0/manager/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.559182 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7c9978f67-sz2f7_5140942d-8224-4889-b650-7ebcd0ce93a1/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.747179 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-59b5fc9845-fxw8p_a4711b00-4b88-47ef-9d5b-c01b57ac9b18/kube-rbac-proxy/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.804320 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-59b5fc9845-fxw8p_a4711b00-4b88-47ef-9d5b-c01b57ac9b18/manager/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.857796 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7c9978f67-sz2f7_5140942d-8224-4889-b650-7ebcd0ce93a1/manager/0.log" Oct 03 18:18:03 crc kubenswrapper[5081]: I1003 18:18:03.964944 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6c9969c6c6-qsnkt_1f197349-94d5-4ef3-962b-89045495d0c9/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.136811 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6c9969c6c6-qsnkt_1f197349-94d5-4ef3-962b-89045495d0c9/manager/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.183213 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-66fdd975d9-vjfhq_85db4351-f8f6-436b-9cf1-eb28aa937b21/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.197594 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-66fdd975d9-vjfhq_85db4351-f8f6-436b-9cf1-eb28aa937b21/manager/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.337149 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-696ff4bcdd-92bxf_61233e66-00aa-4863-be3d-56231db9d643/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.413547 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-696ff4bcdd-92bxf_61233e66-00aa-4863-be3d-56231db9d643/manager/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.534137 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-549fb68678-xrlvp_a810b266-4fbd-4034-add2-362aa5496443/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.636841 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-549fb68678-xrlvp_a810b266-4fbd-4034-add2-362aa5496443/manager/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.657305 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5b45478b88-glxmc_5d0a686a-cc92-40dc-a408-9b02863a2337/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: 
I1003 18:18:04.828745 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:18:04 crc kubenswrapper[5081]: E1003 18:18:04.828978 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.861635 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-b4444585c-qx7v7_109fc9da-53eb-440c-9e33-60388a4ec529/kube-rbac-proxy/0.log" Oct 03 18:18:04 crc kubenswrapper[5081]: I1003 18:18:04.991677 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-b4444585c-qx7v7_109fc9da-53eb-440c-9e33-60388a4ec529/manager/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.032273 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5b45478b88-glxmc_5d0a686a-cc92-40dc-a408-9b02863a2337/manager/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.176677 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw_73d438ca-470e-400d-9314-6567907fa58e/kube-rbac-proxy/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.202267 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7fb4f565cdn27nw_73d438ca-470e-400d-9314-6567907fa58e/manager/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.344136 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54df7874c5-tnb4t_83299e0a-8094-4228-86f2-e0b290cd3571/kube-rbac-proxy/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.397858 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-86f8d7b75f-4qd92_ba803456-8067-4e6c-8233-e6293b2977d7/kube-rbac-proxy/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.806757 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-855d7949fc-2w6tr_cef2a91a-fb0a-418a-bc2a-83e535750cbd/kube-rbac-proxy/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.861643 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-86f8d7b75f-4qd92_ba803456-8067-4e6c-8233-e6293b2977d7/operator/0.log" Oct 03 18:18:05 crc kubenswrapper[5081]: I1003 18:18:05.927224 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-z7szh_adf64075-5998-49ce-99b2-a34e10a163ad/registry-server/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.157667 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-ccbfcb8c-hw6vw_423af876-fc14-4fba-8835-4127010e0888/kube-rbac-proxy/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.157784 5081 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-855d7949fc-2w6tr_cef2a91a-fb0a-418a-bc2a-83e535750cbd/manager/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.347232 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-ccbfcb8c-hw6vw_423af876-fc14-4fba-8835-4127010e0888/manager/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.387525 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-tptrj_3a7aaeef-f949-461a-be01-805e945451d3/operator/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.628602 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-76d5577b-4ngsx_770e893d-89aa-417c-9455-599c14023853/manager/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.663083 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-76d5577b-4ngsx_770e893d-89aa-417c-9455-599c14023853/kube-rbac-proxy/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.734506 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5ffb97cddf-wzqnn_09b461bb-473a-4d23-b18c-00d456eb8810/kube-rbac-proxy/0.log" Oct 03 18:18:06 crc kubenswrapper[5081]: I1003 18:18:06.974111 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb6dcddc-j47qs_a5c423f5-481d-4557-ac32-30285a8d7ed9/kube-rbac-proxy/0.log" Oct 03 18:18:07 crc kubenswrapper[5081]: I1003 18:18:07.008883 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb6dcddc-j47qs_a5c423f5-481d-4557-ac32-30285a8d7ed9/manager/0.log" Oct 03 18:18:07 crc kubenswrapper[5081]: I1003 18:18:07.173727 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5ffb97cddf-wzqnn_09b461bb-473a-4d23-b18c-00d456eb8810/manager/0.log" Oct 03 18:18:07 crc kubenswrapper[5081]: I1003 18:18:07.179603 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5595cf6c95-2c4gm_cc394603-b291-47e6-b048-1668f1857a84/kube-rbac-proxy/0.log" Oct 03 18:18:07 crc kubenswrapper[5081]: I1003 18:18:07.252316 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5595cf6c95-2c4gm_cc394603-b291-47e6-b048-1668f1857a84/manager/0.log" Oct 03 18:18:08 crc kubenswrapper[5081]: I1003 18:18:08.080282 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54df7874c5-tnb4t_83299e0a-8094-4228-86f2-e0b290cd3571/manager/0.log" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.807659 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:10 crc kubenswrapper[5081]: E1003 18:18:10.808500 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446f3314-e6d8-4d1e-8e7a-3b2bc832d031" containerName="container-00" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.808513 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="446f3314-e6d8-4d1e-8e7a-3b2bc832d031" containerName="container-00" Oct 03 18:18:10 crc kubenswrapper[5081]: 
I1003 18:18:10.808754 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="446f3314-e6d8-4d1e-8e7a-3b2bc832d031" containerName="container-00" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.810507 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.826903 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.905380 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.905762 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:10 crc kubenswrapper[5081]: I1003 18:18:10.905787 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpkg2\" (UniqueName: \"kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.007954 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.008034 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.008062 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpkg2\" (UniqueName: \"kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.008648 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.008851 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content\") pod 
\"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.042659 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpkg2\" (UniqueName: \"kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2\") pod \"redhat-operators-82xt7\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.145611 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.877924 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:11 crc kubenswrapper[5081]: I1003 18:18:11.988686 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerStarted","Data":"e8b46e1fe75fd929b6bf28d067168884814bacbc1dbbfe7517d3f248a2844597"} Oct 03 18:18:12 crc kubenswrapper[5081]: I1003 18:18:12.999155 5081 generic.go:334] "Generic (PLEG): container finished" podID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerID="efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b" exitCode=0 Oct 03 18:18:12 crc kubenswrapper[5081]: I1003 18:18:12.999211 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerDied","Data":"efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b"} Oct 03 18:18:15 crc kubenswrapper[5081]: I1003 18:18:15.019659 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerStarted","Data":"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e"} Oct 03 18:18:17 crc kubenswrapper[5081]: I1003 18:18:17.828102 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:18:17 crc kubenswrapper[5081]: E1003 18:18:17.829024 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:18:24 crc kubenswrapper[5081]: I1003 18:18:24.574365 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-lk9tv_84647337-65f5-40fe-9ee7-62f1faebb04c/control-plane-machine-set-operator/0.log" Oct 03 18:18:24 crc kubenswrapper[5081]: I1003 18:18:24.786311 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-qk55l_b43aaaa2-f6bb-449b-90ce-d7324dd5a06d/machine-api-operator/0.log" Oct 03 18:18:24 crc kubenswrapper[5081]: I1003 18:18:24.794131 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-qk55l_b43aaaa2-f6bb-449b-90ce-d7324dd5a06d/kube-rbac-proxy/0.log" Oct 
03 18:18:29 crc kubenswrapper[5081]: I1003 18:18:29.827575 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:18:29 crc kubenswrapper[5081]: E1003 18:18:29.828371 5081 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-lkz79_openshift-machine-config-operator(fc0e93f4-3228-4f47-8edf-4d12bf3baddd)\"" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" Oct 03 18:18:32 crc kubenswrapper[5081]: I1003 18:18:32.249731 5081 generic.go:334] "Generic (PLEG): container finished" podID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerID="f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e" exitCode=0 Oct 03 18:18:32 crc kubenswrapper[5081]: I1003 18:18:32.250309 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerDied","Data":"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e"} Oct 03 18:18:34 crc kubenswrapper[5081]: I1003 18:18:34.272825 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerStarted","Data":"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2"} Oct 03 18:18:34 crc kubenswrapper[5081]: I1003 18:18:34.303048 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-82xt7" podStartSLOduration=3.894823056 podStartE2EDuration="24.303027159s" podCreationTimestamp="2025-10-03 18:18:10 +0000 UTC" firstStartedPulling="2025-10-03 18:18:13.001231602 +0000 UTC m=+10211.966788215" lastFinishedPulling="2025-10-03 18:18:33.409435705 +0000 UTC m=+10232.374992318" observedRunningTime="2025-10-03 18:18:34.294514366 +0000 UTC m=+10233.260070999" watchObservedRunningTime="2025-10-03 18:18:34.303027159 +0000 UTC m=+10233.268583772" Oct 03 18:18:38 crc kubenswrapper[5081]: I1003 18:18:38.381286 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-q4mtf_a6c6ba02-ba1c-4143-a6a1-86b6c3b406f8/cert-manager-controller/0.log" Oct 03 18:18:38 crc kubenswrapper[5081]: I1003 18:18:38.416632 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-xf8jt_a4333955-423d-4f67-9863-023ead4ffc0b/cert-manager-cainjector/0.log" Oct 03 18:18:38 crc kubenswrapper[5081]: I1003 18:18:38.596177 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-sckqg_fc3d77b5-f460-4dfe-b077-fd580b46f949/cert-manager-webhook/0.log" Oct 03 18:18:41 crc kubenswrapper[5081]: I1003 18:18:41.146666 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:41 crc kubenswrapper[5081]: I1003 18:18:41.146943 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:41 crc kubenswrapper[5081]: I1003 18:18:41.211431 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:41 crc kubenswrapper[5081]: I1003 18:18:41.381468 
5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:41 crc kubenswrapper[5081]: I1003 18:18:41.837175 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:18:42 crc kubenswrapper[5081]: I1003 18:18:42.012808 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:42 crc kubenswrapper[5081]: I1003 18:18:42.349047 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"988809b56bdeece34c9025950284c1fd95f126cfbedbddd2444820cd4435109d"} Oct 03 18:18:43 crc kubenswrapper[5081]: I1003 18:18:43.357872 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-82xt7" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="registry-server" containerID="cri-o://51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2" gracePeriod=2 Oct 03 18:18:43 crc kubenswrapper[5081]: I1003 18:18:43.882679 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.036941 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities\") pod \"bed1fc53-ec81-46e3-9b79-f75b135ede18\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.037253 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content\") pod \"bed1fc53-ec81-46e3-9b79-f75b135ede18\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.037441 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpkg2\" (UniqueName: \"kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2\") pod \"bed1fc53-ec81-46e3-9b79-f75b135ede18\" (UID: \"bed1fc53-ec81-46e3-9b79-f75b135ede18\") " Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.038191 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities" (OuterVolumeSpecName: "utilities") pod "bed1fc53-ec81-46e3-9b79-f75b135ede18" (UID: "bed1fc53-ec81-46e3-9b79-f75b135ede18"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.049862 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2" (OuterVolumeSpecName: "kube-api-access-hpkg2") pod "bed1fc53-ec81-46e3-9b79-f75b135ede18" (UID: "bed1fc53-ec81-46e3-9b79-f75b135ede18"). InnerVolumeSpecName "kube-api-access-hpkg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.126858 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bed1fc53-ec81-46e3-9b79-f75b135ede18" (UID: "bed1fc53-ec81-46e3-9b79-f75b135ede18"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.139097 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpkg2\" (UniqueName: \"kubernetes.io/projected/bed1fc53-ec81-46e3-9b79-f75b135ede18-kube-api-access-hpkg2\") on node \"crc\" DevicePath \"\"" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.139126 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.139136 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bed1fc53-ec81-46e3-9b79-f75b135ede18-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.369942 5081 generic.go:334] "Generic (PLEG): container finished" podID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerID="51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2" exitCode=0 Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.369989 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerDied","Data":"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2"} Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.370021 5081 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82xt7" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.370039 5081 scope.go:117] "RemoveContainer" containerID="51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.370027 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82xt7" event={"ID":"bed1fc53-ec81-46e3-9b79-f75b135ede18","Type":"ContainerDied","Data":"e8b46e1fe75fd929b6bf28d067168884814bacbc1dbbfe7517d3f248a2844597"} Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.394003 5081 scope.go:117] "RemoveContainer" containerID="f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.412817 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.424777 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-82xt7"] Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.425968 5081 scope.go:117] "RemoveContainer" containerID="efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.471455 5081 scope.go:117] "RemoveContainer" containerID="51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2" Oct 03 18:18:44 crc kubenswrapper[5081]: E1003 18:18:44.471918 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2\": container with ID starting with 51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2 not found: ID does not exist" containerID="51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.471968 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2"} err="failed to get container status \"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2\": rpc error: code = NotFound desc = could not find container \"51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2\": container with ID starting with 51c0b088c9f53006fd1fa2b920cf4a227a5f067d6d2249ba7624942c286e3ca2 not found: ID does not exist" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.471997 5081 scope.go:117] "RemoveContainer" containerID="f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e" Oct 03 18:18:44 crc kubenswrapper[5081]: E1003 18:18:44.472321 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e\": container with ID starting with f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e not found: ID does not exist" containerID="f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.472380 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e"} err="failed to get container status \"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e\": rpc error: code = NotFound desc = could not find container 
\"f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e\": container with ID starting with f837c2e2ec24a389ffc0bce588a7dba5347b5740f7eb5d3927d2582b11169f3e not found: ID does not exist" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.472413 5081 scope.go:117] "RemoveContainer" containerID="efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b" Oct 03 18:18:44 crc kubenswrapper[5081]: E1003 18:18:44.472672 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b\": container with ID starting with efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b not found: ID does not exist" containerID="efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b" Oct 03 18:18:44 crc kubenswrapper[5081]: I1003 18:18:44.472697 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b"} err="failed to get container status \"efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b\": rpc error: code = NotFound desc = could not find container \"efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b\": container with ID starting with efba218d5f76d629549491366acd0e73394ade69ce7e5d52fb956564f8f3869b not found: ID does not exist" Oct 03 18:18:45 crc kubenswrapper[5081]: I1003 18:18:45.841849 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" path="/var/lib/kubelet/pods/bed1fc53-ec81-46e3-9b79-f75b135ede18/volumes" Oct 03 18:18:50 crc kubenswrapper[5081]: I1003 18:18:50.695759 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-fxssj_36d4396d-83ce-4642-b81f-77f773a8fe3f/nmstate-console-plugin/0.log" Oct 03 18:18:50 crc kubenswrapper[5081]: I1003 18:18:50.868511 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-r4whk_b09fac9a-1cba-40e0-ad40-341209ef1014/nmstate-handler/0.log" Oct 03 18:18:50 crc kubenswrapper[5081]: I1003 18:18:50.918286 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-f74bb_cef12029-69af-4423-9f86-535950d9a8c8/kube-rbac-proxy/0.log" Oct 03 18:18:50 crc kubenswrapper[5081]: I1003 18:18:50.967805 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-f74bb_cef12029-69af-4423-9f86-535950d9a8c8/nmstate-metrics/0.log" Oct 03 18:18:51 crc kubenswrapper[5081]: I1003 18:18:51.155578 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-82vx7_1eb24e7f-b4c5-4fdc-8acf-2c34145dd409/nmstate-operator/0.log" Oct 03 18:18:51 crc kubenswrapper[5081]: I1003 18:18:51.233748 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-6lvtr_e5bd637d-02f0-46a0-af03-14f8a9235b60/nmstate-webhook/0.log" Oct 03 18:19:04 crc kubenswrapper[5081]: I1003 18:19:04.866394 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-99p8w_1d713435-3b4a-4464-8400-55b74becb2d2/kube-rbac-proxy/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.160382 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-frr-files/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.296807 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-99p8w_1d713435-3b4a-4464-8400-55b74becb2d2/controller/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.386252 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-reloader/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.492247 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-frr-files/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.493372 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-metrics/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.550619 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-reloader/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.790272 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-reloader/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.793118 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-metrics/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.828300 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-frr-files/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.843067 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-metrics/0.log" Oct 03 18:19:05 crc kubenswrapper[5081]: I1003 18:19:05.988787 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-frr-files/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.032817 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-reloader/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.085525 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/controller/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.090710 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/cp-metrics/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.221156 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/frr-metrics/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.289353 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/kube-rbac-proxy-frr/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.306333 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/kube-rbac-proxy/0.log" Oct 03 18:19:06 crc 
kubenswrapper[5081]: I1003 18:19:06.512874 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/reloader/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.583068 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-5244x_08b3556e-4f32-4c35-9074-711ed072ea2d/frr-k8s-webhook-server/0.log" Oct 03 18:19:06 crc kubenswrapper[5081]: I1003 18:19:06.838468 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-696555d8b4-p86s5_b0a63649-dc26-4b38-ba41-7b93349c2385/manager/0.log" Oct 03 18:19:07 crc kubenswrapper[5081]: I1003 18:19:07.072030 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-656b449cbd-zcx5t_ee37d6b3-b01e-4dcf-9c81-ba25b7860a7e/webhook-server/0.log" Oct 03 18:19:07 crc kubenswrapper[5081]: I1003 18:19:07.152190 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5jcvg_b0782857-7149-4abb-bf32-d50899f01453/kube-rbac-proxy/0.log" Oct 03 18:19:08 crc kubenswrapper[5081]: I1003 18:19:08.222328 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5jcvg_b0782857-7149-4abb-bf32-d50899f01453/speaker/0.log" Oct 03 18:19:09 crc kubenswrapper[5081]: I1003 18:19:09.512978 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gn4v6_a0c3b06d-5d2f-46b6-ac2e-38e5bd382f40/frr/0.log" Oct 03 18:19:19 crc kubenswrapper[5081]: I1003 18:19:19.787694 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/util/0.log" Oct 03 18:19:19 crc kubenswrapper[5081]: I1003 18:19:19.936751 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/util/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.022328 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.042365 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.157110 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/util/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.193621 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/extract/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.232643 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69z9xsh_30b6aba5-eb9f-4f4e-aac3-cfeb3966bdc9/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.360283 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/util/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.571327 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/util/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.593232 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.623371 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.755416 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/util/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.762192 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/pull/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.821093 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2rnf4t_52892b90-1940-4bfa-8460-c3dce0452734/extract/0.log" Oct 03 18:19:20 crc kubenswrapper[5081]: I1003 18:19:20.956774 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/util/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.154469 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/util/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.154721 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/pull/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.186194 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/pull/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.333920 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/extract/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.348412 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/pull/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.348911 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dh4m58_e940af2e-a01b-4861-9b5e-db37af275f1c/util/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.519551 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-utilities/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.698480 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-utilities/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.702610 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-content/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.707271 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-content/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.944942 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-content/0.log" Oct 03 18:19:21 crc kubenswrapper[5081]: I1003 18:19:21.958485 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/extract-utilities/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.154404 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-utilities/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.397522 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-content/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.452607 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-content/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.555801 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-utilities/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.742507 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-content/0.log" Oct 03 18:19:22 crc kubenswrapper[5081]: I1003 18:19:22.786292 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/extract-utilities/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.064330 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/util/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.314025 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/util/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: 
I1003 18:19:23.371374 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/pull/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.410533 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/pull/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.517050 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pf89b_bcbbdb6b-7c85-4ec3-87ab-45560ec82d96/registry-server/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.651477 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/util/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.692979 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/extract/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.716200 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835ccmstq_42f4375e-1b8e-4805-8031-a87e31f1d6c4/pull/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.917860 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-chvtb_79683909-dba0-4c91-9a78-09bdbc9da494/marketplace-operator/0.log" Oct 03 18:19:23 crc kubenswrapper[5081]: I1003 18:19:23.997977 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-utilities/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.094246 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-utilities/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.189079 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-content/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.219799 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-content/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.430088 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-utilities/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.445630 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/extract-content/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.962217 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vqq89_fabdc78f-4187-4f36-82a3-ac9d05990b5a/registry-server/0.log" Oct 03 18:19:24 crc kubenswrapper[5081]: I1003 18:19:24.972216 5081 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-9bwhw_7d28813c-7855-40f7-bdbc-3b4541272950/registry-server/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.159398 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-utilities/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.352405 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-utilities/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.371689 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-content/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.464985 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-content/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.637782 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-utilities/0.log" Oct 03 18:19:25 crc kubenswrapper[5081]: I1003 18:19:25.700544 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/extract-content/0.log" Oct 03 18:19:26 crc kubenswrapper[5081]: I1003 18:19:26.711687 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd4dm_2cd58fd3-9571-4d36-a37b-b6cf4337e792/registry-server/0.log" Oct 03 18:19:38 crc kubenswrapper[5081]: I1003 18:19:38.122166 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-z4jcm_83a23695-6bcb-4dbd-909d-0f7af9be2b25/prometheus-operator/0.log" Oct 03 18:19:38 crc kubenswrapper[5081]: I1003 18:19:38.265382 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-778b46994f-cct5k_57839bb1-8406-48c1-adf4-ed7dfe80723b/prometheus-operator-admission-webhook/0.log" Oct 03 18:19:38 crc kubenswrapper[5081]: I1003 18:19:38.351786 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-778b46994f-f269d_8b7bf516-55f6-4da8-baed-a26d0a03dbae/prometheus-operator-admission-webhook/0.log" Oct 03 18:19:38 crc kubenswrapper[5081]: I1003 18:19:38.441343 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-7jnvg_157e1a24-f062-4148-af25-b1bda9a5ef03/operator/0.log" Oct 03 18:19:38 crc kubenswrapper[5081]: I1003 18:19:38.531102 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-trfrl_db565abb-fbeb-4cd1-9c93-673a81facb8c/perses-operator/0.log" Oct 03 18:19:45 crc kubenswrapper[5081]: E1003 18:19:45.008591 5081 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.59:42832->38.102.83.59:44537: write tcp 38.102.83.59:42832->38.102.83.59:44537: write: broken pipe Oct 03 18:20:09 crc kubenswrapper[5081]: E1003 18:20:09.600639 5081 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.59:36778->38.102.83.59:44537: write tcp 
38.102.83.59:36778->38.102.83.59:44537: write: broken pipe Oct 03 18:21:00 crc kubenswrapper[5081]: I1003 18:21:00.647921 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:21:00 crc kubenswrapper[5081]: I1003 18:21:00.648413 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:21:30 crc kubenswrapper[5081]: I1003 18:21:30.647350 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:21:30 crc kubenswrapper[5081]: I1003 18:21:30.647812 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.995432 5081 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"] Oct 03 18:21:59 crc kubenswrapper[5081]: E1003 18:21:59.996507 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="extract-content" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.996524 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="extract-content" Oct 03 18:21:59 crc kubenswrapper[5081]: E1003 18:21:59.996553 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="extract-utilities" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.996603 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="extract-utilities" Oct 03 18:21:59 crc kubenswrapper[5081]: E1003 18:21:59.996640 5081 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="registry-server" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.996649 5081 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="registry-server" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.996930 5081 memory_manager.go:354] "RemoveStaleState removing state" podUID="bed1fc53-ec81-46e3-9b79-f75b135ede18" containerName="registry-server" Oct 03 18:21:59 crc kubenswrapper[5081]: I1003 18:21:59.999118 5081 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.037838 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"] Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.092142 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.092195 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkwxp\" (UniqueName: \"kubernetes.io/projected/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-kube-api-access-jkwxp\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.092350 5081 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.194646 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.194768 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.194805 5081 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkwxp\" (UniqueName: \"kubernetes.io/projected/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-kube-api-access-jkwxp\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.195767 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.196044 5081 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.216603 5081 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jkwxp\" (UniqueName: \"kubernetes.io/projected/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-kube-api-access-jkwxp\") pod \"certified-operators-ltvkl\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") " pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.337802 5081 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ltvkl" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.650810 5081 patch_prober.go:28] interesting pod/machine-config-daemon-lkz79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.651091 5081 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.651131 5081 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.651932 5081 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"988809b56bdeece34c9025950284c1fd95f126cfbedbddd2444820cd4435109d"} pod="openshift-machine-config-operator/machine-config-daemon-lkz79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.652191 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" podUID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerName="machine-config-daemon" containerID="cri-o://988809b56bdeece34c9025950284c1fd95f126cfbedbddd2444820cd4435109d" gracePeriod=600 Oct 03 18:22:00 crc kubenswrapper[5081]: I1003 18:22:00.990349 5081 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"] Oct 03 18:22:00 crc kubenswrapper[5081]: W1003 18:22:00.990760 5081 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1dedd26c_b3af_44b0_8739_6bb7a6526ceb.slice/crio-9b5db3bebf4b17116d61ebae9652b50244ca25d00da7b5878034a627289da95e WatchSource:0}: Error finding container 9b5db3bebf4b17116d61ebae9652b50244ca25d00da7b5878034a627289da95e: Status 404 returned error can't find the container with id 9b5db3bebf4b17116d61ebae9652b50244ca25d00da7b5878034a627289da95e Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.650927 5081 generic.go:334] "Generic (PLEG): container finished" podID="1dedd26c-b3af-44b0-8739-6bb7a6526ceb" containerID="22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef" exitCode=0 Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.651010 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" 
event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerDied","Data":"22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef"} Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.651606 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerStarted","Data":"9b5db3bebf4b17116d61ebae9652b50244ca25d00da7b5878034a627289da95e"} Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.653235 5081 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.663868 5081 generic.go:334] "Generic (PLEG): container finished" podID="fc0e93f4-3228-4f47-8edf-4d12bf3baddd" containerID="988809b56bdeece34c9025950284c1fd95f126cfbedbddd2444820cd4435109d" exitCode=0 Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.663921 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerDied","Data":"988809b56bdeece34c9025950284c1fd95f126cfbedbddd2444820cd4435109d"} Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.663949 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-lkz79" event={"ID":"fc0e93f4-3228-4f47-8edf-4d12bf3baddd","Type":"ContainerStarted","Data":"25186fb920fdf6d2e9fe1a13b75449cfd916aaf2165c1633e0ad364ec945351f"} Oct 03 18:22:01 crc kubenswrapper[5081]: I1003 18:22:01.663967 5081 scope.go:117] "RemoveContainer" containerID="65c73b5cbf2208bb7e8e0c5c630a601ab21e11b2dbaf9cf128f4a5ec2deb4ee2" Oct 03 18:22:03 crc kubenswrapper[5081]: I1003 18:22:03.699467 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerStarted","Data":"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"} Oct 03 18:22:04 crc kubenswrapper[5081]: I1003 18:22:04.711180 5081 generic.go:334] "Generic (PLEG): container finished" podID="1dedd26c-b3af-44b0-8739-6bb7a6526ceb" containerID="50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e" exitCode=0 Oct 03 18:22:04 crc kubenswrapper[5081]: I1003 18:22:04.711521 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerDied","Data":"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"} Oct 03 18:22:05 crc kubenswrapper[5081]: I1003 18:22:05.721653 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerStarted","Data":"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119"} Oct 03 18:22:05 crc kubenswrapper[5081]: I1003 18:22:05.745300 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ltvkl" podStartSLOduration=3.273968163 podStartE2EDuration="6.745283838s" podCreationTimestamp="2025-10-03 18:21:59 +0000 UTC" firstStartedPulling="2025-10-03 18:22:01.653033265 +0000 UTC m=+10440.618589878" lastFinishedPulling="2025-10-03 18:22:05.12434893 +0000 UTC m=+10444.089905553" observedRunningTime="2025-10-03 18:22:05.741520621 +0000 UTC m=+10444.707077244" 
Oct 03 18:22:05 crc kubenswrapper[5081]: I1003 18:22:05.745300 5081 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ltvkl" podStartSLOduration=3.273968163 podStartE2EDuration="6.745283838s" podCreationTimestamp="2025-10-03 18:21:59 +0000 UTC" firstStartedPulling="2025-10-03 18:22:01.653033265 +0000 UTC m=+10440.618589878" lastFinishedPulling="2025-10-03 18:22:05.12434893 +0000 UTC m=+10444.089905553" observedRunningTime="2025-10-03 18:22:05.741520621 +0000 UTC m=+10444.707077244" watchObservedRunningTime="2025-10-03 18:22:05.745283838 +0000 UTC m=+10444.710840451"
Oct 03 18:22:10 crc kubenswrapper[5081]: I1003 18:22:10.338008 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:10 crc kubenswrapper[5081]: I1003 18:22:10.338639 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:10 crc kubenswrapper[5081]: I1003 18:22:10.395985 5081 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:10 crc kubenswrapper[5081]: I1003 18:22:10.828755 5081 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:10 crc kubenswrapper[5081]: I1003 18:22:10.911535 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"]
Oct 03 18:22:12 crc kubenswrapper[5081]: I1003 18:22:12.788800 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ltvkl" podUID="1dedd26c-b3af-44b0-8739-6bb7a6526ceb" containerName="registry-server" containerID="cri-o://fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119" gracePeriod=2
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.300329 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.431193 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content\") pod \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") "
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.431251 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwxp\" (UniqueName: \"kubernetes.io/projected/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-kube-api-access-jkwxp\") pod \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") "
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.431437 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities\") pod \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\" (UID: \"1dedd26c-b3af-44b0-8739-6bb7a6526ceb\") "
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.432554 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities" (OuterVolumeSpecName: "utilities") pod "1dedd26c-b3af-44b0-8739-6bb7a6526ceb" (UID: "1dedd26c-b3af-44b0-8739-6bb7a6526ceb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
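[Editor's note: the "Observed pod startup duration" entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A quick check in Go using the timestamps verbatim from the log; the few-nanosecond drift on the SLO figure comes from the kubelet computing with monotonic readings, the m=+ offsets:]

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-03 18:21:59 +0000 UTC")           // podCreationTimestamp
	running := parse("2025-10-03 18:22:05.745283838 +0000 UTC") // watchObservedRunningTime
	firstPull := parse("2025-10-03 18:22:01.653033265 +0000 UTC")
	lastPull := parse("2025-10-03 18:22:05.12434893 +0000 UTC")

	e2e := running.Sub(created)     // 6.745283838s, matching podStartE2EDuration
	pull := lastPull.Sub(firstPull) // ~3.471s spent pulling the catalog image
	fmt.Println(e2e, pull, e2e-pull) // e2e-pull ≈ 3.273968163s = podStartSLOduration
}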
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.476355 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1dedd26c-b3af-44b0-8739-6bb7a6526ceb" (UID: "1dedd26c-b3af-44b0-8739-6bb7a6526ceb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.534434 5081 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.534482 5081 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.534507 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwxp\" (UniqueName: \"kubernetes.io/projected/1dedd26c-b3af-44b0-8739-6bb7a6526ceb-kube-api-access-jkwxp\") on node \"crc\" DevicePath \"\"" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.806830 5081 generic.go:334] "Generic (PLEG): container finished" podID="1dedd26c-b3af-44b0-8739-6bb7a6526ceb" containerID="fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119" exitCode=0 Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.806877 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerDied","Data":"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119"} Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.806906 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ltvkl" event={"ID":"1dedd26c-b3af-44b0-8739-6bb7a6526ceb","Type":"ContainerDied","Data":"9b5db3bebf4b17116d61ebae9652b50244ca25d00da7b5878034a627289da95e"} Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.806928 5081 scope.go:117] "RemoveContainer" containerID="fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.807087 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.807087 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ltvkl"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.835999 5081 scope.go:117] "RemoveContainer" containerID="50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.861996 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"]
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.871215 5081 scope.go:117] "RemoveContainer" containerID="22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.875159 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ltvkl"]
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.922399 5081 scope.go:117] "RemoveContainer" containerID="fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119"
Oct 03 18:22:13 crc kubenswrapper[5081]: E1003 18:22:13.922866 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119\": container with ID starting with fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119 not found: ID does not exist" containerID="fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.922920 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119"} err="failed to get container status \"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119\": rpc error: code = NotFound desc = could not find container \"fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119\": container with ID starting with fc0814dddd90525e9bf4dbe074c54f58d5582d9544d6d455b29207cd73ef8119 not found: ID does not exist"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.922952 5081 scope.go:117] "RemoveContainer" containerID="50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"
Oct 03 18:22:13 crc kubenswrapper[5081]: E1003 18:22:13.923273 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e\": container with ID starting with 50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e not found: ID does not exist" containerID="50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.923308 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e"} err="failed to get container status \"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e\": rpc error: code = NotFound desc = could not find container \"50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e\": container with ID starting with 50382bd055289b3ef20f52915a893bae4e4140e2f2a1f783f579043888287e4e not found: ID does not exist"
Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.923328 5081 scope.go:117] "RemoveContainer" containerID="22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef"
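[Editor's note: the NotFound errors above are benign; the containers were already gone when deletion retried, so the kubelet just logs the error and moves on. The usual way to make such cleanup idempotent is to swallow gRPC NotFound. A sketch against a hypothetical CRI-style client — the runtimeClient interface here is invented for illustration:]

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeClient is a stand-in for a CRI runtime connection; only the one
// method needed for the example is modeled.
type runtimeClient interface {
	RemoveContainer(id string) error
}

// removeIfPresent treats "already deleted" as success, which is what the
// retried RemoveContainer calls in the log effectively amount to.
func removeIfPresent(rc runtimeClient, id string) error {
	err := rc.RemoveContainer(id)
	if status.Code(err) == codes.NotFound {
		return nil // container already gone; nothing left to do
	}
	return err
}

// fakeRuntime always reports NotFound, mimicking the log's rpc error.
type fakeRuntime struct{}

func (fakeRuntime) RemoveContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	fmt.Println(removeIfPresent(fakeRuntime{}, "fc0814dd")) // prints <nil>
}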
failed" err="rpc error: code = NotFound desc = could not find container \"22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef\": container with ID starting with 22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef not found: ID does not exist" containerID="22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef" Oct 03 18:22:13 crc kubenswrapper[5081]: I1003 18:22:13.924483 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef"} err="failed to get container status \"22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef\": rpc error: code = NotFound desc = could not find container \"22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef\": container with ID starting with 22214b6d7253f7f509afc8686994ae4e4a28bbd8faa1d07c87396c3c4179c6ef not found: ID does not exist" Oct 03 18:22:15 crc kubenswrapper[5081]: I1003 18:22:15.842947 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dedd26c-b3af-44b0-8739-6bb7a6526ceb" path="/var/lib/kubelet/pods/1dedd26c-b3af-44b0-8739-6bb7a6526ceb/volumes" Oct 03 18:22:22 crc kubenswrapper[5081]: I1003 18:22:22.922091 5081 generic.go:334] "Generic (PLEG): container finished" podID="db561d03-866c-4f07-9775-0c6f0a5cc7ff" containerID="f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3" exitCode=0 Oct 03 18:22:22 crc kubenswrapper[5081]: I1003 18:22:22.922167 5081 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" event={"ID":"db561d03-866c-4f07-9775-0c6f0a5cc7ff","Type":"ContainerDied","Data":"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"} Oct 03 18:22:22 crc kubenswrapper[5081]: I1003 18:22:22.923684 5081 scope.go:117] "RemoveContainer" containerID="f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3" Oct 03 18:22:23 crc kubenswrapper[5081]: I1003 18:22:23.216410 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tkqzh_must-gather-9r2h4_db561d03-866c-4f07-9775-0c6f0a5cc7ff/gather/0.log" Oct 03 18:22:26 crc kubenswrapper[5081]: E1003 18:22:26.351160 5081 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.59:45240->38.102.83.59:44537: write tcp 38.102.83.59:45240->38.102.83.59:44537: write: broken pipe Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.132110 5081 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tkqzh/must-gather-9r2h4"] Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.132828 5081 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-tkqzh/must-gather-9r2h4" podUID="db561d03-866c-4f07-9775-0c6f0a5cc7ff" containerName="copy" containerID="cri-o://4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0" gracePeriod=2 Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.156038 5081 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tkqzh/must-gather-9r2h4"] Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.597647 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tkqzh_must-gather-9r2h4_db561d03-866c-4f07-9775-0c6f0a5cc7ff/copy/0.log" Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.598388 5081 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.598388 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.695913 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr9mm\" (UniqueName: \"kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm\") pod \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") "
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.696191 5081 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output\") pod \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\" (UID: \"db561d03-866c-4f07-9775-0c6f0a5cc7ff\") "
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.702802 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm" (OuterVolumeSpecName: "kube-api-access-cr9mm") pod "db561d03-866c-4f07-9775-0c6f0a5cc7ff" (UID: "db561d03-866c-4f07-9775-0c6f0a5cc7ff"). InnerVolumeSpecName "kube-api-access-cr9mm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.799497 5081 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr9mm\" (UniqueName: \"kubernetes.io/projected/db561d03-866c-4f07-9775-0c6f0a5cc7ff-kube-api-access-cr9mm\") on node \"crc\" DevicePath \"\""
Oct 03 18:22:32 crc kubenswrapper[5081]: I1003 18:22:32.912759 5081 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "db561d03-866c-4f07-9775-0c6f0a5cc7ff" (UID: "db561d03-866c-4f07-9775-0c6f0a5cc7ff"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.004400 5081 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/db561d03-866c-4f07-9775-0c6f0a5cc7ff-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.055138 5081 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tkqzh_must-gather-9r2h4_db561d03-866c-4f07-9775-0c6f0a5cc7ff/copy/0.log"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.055900 5081 generic.go:334] "Generic (PLEG): container finished" podID="db561d03-866c-4f07-9775-0c6f0a5cc7ff" containerID="4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0" exitCode=143
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.055983 5081 scope.go:117] "RemoveContainer" containerID="4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.056017 5081 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tkqzh/must-gather-9r2h4"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.077491 5081 scope.go:117] "RemoveContainer" containerID="f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.162440 5081 scope.go:117] "RemoveContainer" containerID="4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0"
Oct 03 18:22:33 crc kubenswrapper[5081]: E1003 18:22:33.162912 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0\": container with ID starting with 4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0 not found: ID does not exist" containerID="4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.162956 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0"} err="failed to get container status \"4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0\": rpc error: code = NotFound desc = could not find container \"4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0\": container with ID starting with 4ab258d3a531b60014e5b00474d5c3ea21a6967f4a4fb9f1037128d1511a1ce0 not found: ID does not exist"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.162988 5081 scope.go:117] "RemoveContainer" containerID="f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"
Oct 03 18:22:33 crc kubenswrapper[5081]: E1003 18:22:33.163386 5081 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3\": container with ID starting with f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3 not found: ID does not exist" containerID="f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.163446 5081 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3"} err="failed to get container status \"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3\": rpc error: code = NotFound desc = could not find container \"f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3\": container with ID starting with f17a7fabfcdf8bb0c7911aeacab6f28127cd3c974d036db5524536ae037394b3 not found: ID does not exist"
Oct 03 18:22:33 crc kubenswrapper[5081]: I1003 18:22:33.840484 5081 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db561d03-866c-4f07-9775-0c6f0a5cc7ff" path="/var/lib/kubelet/pods/db561d03-866c-4f07-9775-0c6f0a5cc7ff/volumes"
Oct 03 18:23:30 crc kubenswrapper[5081]: I1003 18:23:30.704878 5081 scope.go:117] "RemoveContainer" containerID="d0fad12341ce6932b68db256e3cd7c5cc9b0b4afaa9620b00a6db0b9783dd7ed"
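[Editor's note: entries like the ones in this file are easiest to audit by filtering on the kubelet's structured message keys ("Probe failed", "DeleteContainer returned error", and so on). A small stdlib-only filter, assuming a journal-style file such as this kubelet.log; the path and the chosen keys are only examples:]

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("kubelet.log") // path is an assumption; adjust as needed
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some entries are long
	for sc.Scan() {
		line := sc.Text()
		// Surface probe failures and container GC errors, the two
		// recurring themes in the tail of this log.
		if strings.Contains(line, `"Probe failed"`) ||
			strings.Contains(line, "DeleteContainer returned error") {
			fmt.Println(line)
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}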